[med-svn] [mypy] 05/06: New upstream version 0.480.dev0

Michael Crusoe misterc-guest at moszumanska.debian.org
Thu Feb 2 08:22:57 UTC 2017


This is an automated email from the git hooks/post-receive script.

misterc-guest pushed a commit to branch master
in repository mypy.

commit 111e209f793fc4bb2851cce7965e1e4c8a677166
Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Date:   Thu Feb 2 00:20:40 2017 -0800

    New upstream version 0.480.dev0
---
 .gitignore                                         |   29 +
 .gitmodules                                        |    3 +
 .travis.yml                                        |   20 +
 CONTRIBUTING.md                                    |  152 +
 CREDITS                                            |  101 +
 LICENSE                                            |  228 ++
 PKG-INFO                                           |    2 +-
 README.md                                          |  310 ++
 appveyor.yml                                       |   32 +
 build-requirements.txt                             |    2 +
 conftest.py                                        |    3 +
 docs/Makefile                                      |  177 +
 docs/README.md                                     |   49 +
 docs/make.bat                                      |  242 ++
 docs/requirements-docs.txt                         |    2 +
 docs/source/additional_features.rst                |    9 +
 docs/source/basics.rst                             |  194 ++
 docs/source/builtin_types.rst                      |   37 +
 docs/source/casts.rst                              |   39 +
 docs/source/cheat_sheet.rst                        |  241 ++
 docs/source/cheat_sheet_py3.rst                    |  288 ++
 docs/source/class_basics.rst                       |  157 +
 docs/source/command_line.rst                       |  413 +++
 docs/source/common_issues.rst                      |  386 +++
 docs/source/conf.py                                |  268 ++
 docs/source/config_file.rst                        |  184 ++
 docs/source/duck_type_compatibility.rst            |   40 +
 docs/source/dynamic_typing.rst                     |   86 +
 docs/source/faq.rst                                |  270 ++
 docs/source/function_overloading.rst               |   60 +
 docs/source/generics.rst                           |  390 +++
 docs/source/getting_started.rst                    |   24 +
 docs/source/index.rst                              |   42 +
 docs/source/introduction.rst                       |   30 +
 docs/source/kinds_of_types.rst                     | 1002 ++++++
 docs/source/python2.rst                            |  136 +
 docs/source/python36.rst                           |  100 +
 docs/source/revision_history.rst                   |  176 +
 docs/source/supported_python_features.rst          |   20 +
 docs/source/type_inference_and_annotations.rst     |  172 +
 extensions/README.md                               |    6 +
 extensions/mypy_extensions.py                      |   97 +
 extensions/setup.py                                |   44 +
 lib-typing/2.7/setup.py                            |   46 +
 lib-typing/2.7/test_typing.py                      | 1629 ++++++++++
 lib-typing/2.7/typing.py                           | 2016 ++++++++++++
 lib-typing/3.2/test_typing.py                      | 2063 ++++++++++++
 lib-typing/3.2/typing.py                           | 2160 +++++++++++++
 misc/actions_stubs.py                              |  111 +
 misc/analyze_cache.py                              |  189 ++
 misc/async_matrix.py                               |  120 +
 misc/fix_annotate.py                               |  219 ++
 misc/incremental_checker.py                        |  356 ++
 misc/macs.el                                       |   22 +
 misc/perf_checker.py                               |   93 +
 misc/remove-eol-whitespace.sh                      |    8 +
 misc/test_case_to_actual.py                        |   71 +
 misc/touch_checker.py                              |  151 +
 misc/variadics.py                                  |   54 +
 mypy.egg-info/PKG-INFO                             |    2 +-
 mypy.egg-info/SOURCES.txt                          |  416 ++-
 mypy/api.py                                        |   32 +-
 mypy/build.py                                      |    8 +-
 mypy/checker.py                                    |  400 ++-
 mypy/checkexpr.py                                  |  179 +-
 mypy/checkmember.py                                |   27 +-
 mypy/constraints.py                                |   62 +-
 mypy/errors.py                                     |    2 +-
 mypy/expandtype.py                                 |   34 +-
 mypy/fastparse.py                                  |  213 +-
 mypy/fastparse2.py                                 |  271 +-
 mypy/main.py                                       |  126 +-
 mypy/messages.py                                   |   10 +-
 mypy/myunit/__init__.py                            |  380 +++
 mypy/myunit/__main__.py                            |   18 +
 mypy/nodes.py                                      |  102 +-
 mypy/options.py                                    |    6 +-
 mypy/sametypes.py                                  |   13 +-
 mypy/semanal.py                                    |   98 +-
 mypy/strconv.py                                    |   12 +-
 .../2/xml/__init__.pyi => mypy/test/__init__.py    |    0
 .../xml/etree/__init__.pyi => mypy/test/collect.py |    0
 mypy/test/config.py                                |   19 +
 mypy/test/data.py                                  |  483 +++
 mypy/test/helpers.py                               |  285 ++
 mypy/test/testargs.py                              |   18 +
 mypy/test/testcheck.py                             |  332 ++
 mypy/test/testcmdline.py                           |  104 +
 mypy/test/testextensions.py                        |  125 +
 mypy/test/testgraph.py                             |   69 +
 mypy/test/testinfer.py                             |  223 ++
 mypy/test/testlex.py                               |  466 +++
 mypy/test/testmoduleinfo.py                        |   14 +
 mypy/test/testparse.py                             |   79 +
 mypy/test/testpythoneval.py                        |  135 +
 mypy/test/testreports.py                           |   40 +
 mypy/test/testsemanal.py                           |  224 ++
 mypy/test/testsolve.py                             |  156 +
 mypy/test/teststubgen.py                           |  186 ++
 mypy/test/testsubtypes.py                          |  208 ++
 mypy/test/testtransform.py                         |   85 +
 mypy/test/testtypegen.py                           |  128 +
 mypy/test/testtypes.py                             |  847 +++++
 .../3.2/xml/__init__.pyi => mypy/test/update.py    |    0
 mypy/traverser.py                                  |    2 +
 mypy/treetransform.py                              |    8 +-
 mypy/typefixture.py                                |   18 +-
 mypy/types.py                                      |    7 +-
 mypy/typevars.py                                   |   24 +
 mypy/version.py                                    |    2 +-
 mypy/visitor.py                                    |  171 +-
 mypy_self_check.ini                                |    8 +
 mypy_strict_optional.ini                           |    5 +
 pinfer/.gitignore                                  |    3 +
 pinfer/LICENSE                                     |   27 +
 pinfer/README                                      |   47 +
 .../xml/etree/__init__.pyi => pinfer/__init__.py   |    0
 pinfer/inspect3.py                                 |  122 +
 pinfer/p.py                                        |   83 +
 pinfer/pinfer.py                                   |  686 ++++
 pinfer/test_pinfer.py                              |  302 ++
 pinfer/test_pinfer3.py                             |   31 +
 pinfer/unparse.py                                  |  610 ++++
 pinfer/unparse3.py                                 |  610 ++++
 pytest.ini                                         |   14 +
 runtests.py                                        |  428 +++
 setup.cfg                                          |    4 +-
 test-data/.flake8                                  |   21 +
 test-data/samples/bottles.py                       |   13 +
 test-data/samples/class.py                         |   18 +
 test-data/samples/cmdline.py                       |    8 +
 test-data/samples/crawl.py                         |  863 +++++
 test-data/samples/crawl2.py                        |  852 +++++
 test-data/samples/dict.py                          |    8 +
 test-data/samples/fib.py                           |    5 +
 test-data/samples/files.py                         |   14 +
 test-data/samples/for.py                           |    4 +
 test-data/samples/generators.py                    |   24 +
 test-data/samples/greet.py                         |    8 +
 test-data/samples/guess.py                         |   32 +
 test-data/samples/hello.py                         |    2 +
 test-data/samples/input.py                         |    3 +
 test-data/samples/itertool.py                      |   16 +
 test-data/samples/readme.txt                       |   25 +
 test-data/samples/regexp.py                        |    7 +
 test-data/stdlib-samples/3.2/base64.py             |  411 +++
 test-data/stdlib-samples/3.2/fnmatch.py            |  112 +
 test-data/stdlib-samples/3.2/genericpath.py        |  112 +
 test-data/stdlib-samples/3.2/getopt.py             |  220 ++
 test-data/stdlib-samples/3.2/glob.py               |   84 +
 .../3.2/incomplete/logging/__init__.py             | 1873 +++++++++++
 .../3.2/incomplete/urllib/__init__.py              |    0
 .../stdlib-samples/3.2/incomplete/urllib/parse.py  |  980 ++++++
 test-data/stdlib-samples/3.2/posixpath.py          |  466 +++
 test-data/stdlib-samples/3.2/pprint.py             |  380 +++
 test-data/stdlib-samples/3.2/random.py             |  743 +++++
 test-data/stdlib-samples/3.2/shutil.py             |  790 +++++
 test-data/stdlib-samples/3.2/subprocess.py         | 1703 ++++++++++
 test-data/stdlib-samples/3.2/tempfile.py           |  717 +++++
 .../stdlib-samples/3.2/test/__init__.py            |    0
 test-data/stdlib-samples/3.2/test/randv2_32.pck    |  633 ++++
 test-data/stdlib-samples/3.2/test/randv2_64.pck    |  633 ++++
 test-data/stdlib-samples/3.2/test/randv3.pck       |  633 ++++
 .../3.2/test/subprocessdata/fd_status.py           |   24 +
 .../3.2/test/subprocessdata/input_reader.py        |    7 +
 .../stdlib-samples/3.2/test/subprocessdata/qcat.py |    7 +
 .../3.2/test/subprocessdata/qgrep.py               |   10 +
 .../3.2/test/subprocessdata/sigchild_ignore.py     |    6 +
 test-data/stdlib-samples/3.2/test/support.py       | 1602 +++++++++
 test-data/stdlib-samples/3.2/test/test_base64.py   |  267 ++
 test-data/stdlib-samples/3.2/test/test_fnmatch.py  |   93 +
 .../stdlib-samples/3.2/test/test_genericpath.py    |  313 ++
 test-data/stdlib-samples/3.2/test/test_getopt.py   |  190 ++
 test-data/stdlib-samples/3.2/test/test_glob.py     |  122 +
 .../stdlib-samples/3.2/test/test_posixpath.py      |  531 +++
 test-data/stdlib-samples/3.2/test/test_pprint.py   |  488 +++
 test-data/stdlib-samples/3.2/test/test_random.py   |  533 +++
 test-data/stdlib-samples/3.2/test/test_set.py      | 1884 +++++++++++
 test-data/stdlib-samples/3.2/test/test_shutil.py   |  978 ++++++
 .../stdlib-samples/3.2/test/test_subprocess.py     | 1764 ++++++++++
 test-data/stdlib-samples/3.2/test/test_tempfile.py | 1122 +++++++
 test-data/stdlib-samples/3.2/test/test_textwrap.py |  601 ++++
 .../stdlib-samples/3.2/test/tf_inherit_check.py    |   25 +
 test-data/stdlib-samples/3.2/textwrap.py           |  391 +++
 test-data/unit/check-abstract.test                 |  734 +++++
 test-data/unit/check-async-await.test              |  393 +++
 test-data/unit/check-basic.test                    |  310 ++
 test-data/unit/check-bound.test                    |  203 ++
 test-data/unit/check-callable.test                 |  345 ++
 test-data/unit/check-class-namedtuple.test         |  378 +++
 test-data/unit/check-classes.test                  | 2761 ++++++++++++++++
 test-data/unit/check-columns.test                  |   68 +
 test-data/unit/check-dynamic-typing.test           |  676 ++++
 test-data/unit/check-expressions.test              | 1652 ++++++++++
 test-data/unit/check-fastparse.test                |  301 ++
 test-data/unit/check-flags.test                    |  305 ++
 test-data/unit/check-functions.test                | 1666 ++++++++++
 test-data/unit/check-generic-subtyping.test        |  746 +++++
 test-data/unit/check-generics.test                 | 1462 +++++++++
 test-data/unit/check-ignore.test                   |  218 ++
 test-data/unit/check-incremental.test              | 1780 ++++++++++
 test-data/unit/check-inference-context.test        |  880 +++++
 test-data/unit/check-inference.test                | 1765 ++++++++++
 test-data/unit/check-isinstance.test               | 1330 ++++++++
 test-data/unit/check-kwargs.test                   |  339 ++
 test-data/unit/check-lists.test                    |   72 +
 test-data/unit/check-modules.test                  | 1408 ++++++++
 test-data/unit/check-multiple-inheritance.test     |  242 ++
 test-data/unit/check-namedtuple.test               |  429 +++
 test-data/unit/check-newsyntax.test                |  100 +
 test-data/unit/check-newtype.test                  |  324 ++
 test-data/unit/check-optional.test                 |  546 ++++
 test-data/unit/check-overloading.test              |  759 +++++
 test-data/unit/check-python2.test                  |  242 ++
 test-data/unit/check-selftype.test                 |  358 +++
 test-data/unit/check-semanal-error.test            |   81 +
 test-data/unit/check-statements.test               | 1451 +++++++++
 test-data/unit/check-super.test                    |  109 +
 test-data/unit/check-tuples.test                   |  927 ++++++
 test-data/unit/check-type-aliases.test             |   74 +
 test-data/unit/check-type-checks.test              |  113 +
 test-data/unit/check-type-promotion.test           |   39 +
 test-data/unit/check-typeddict.test                |  462 +++
 test-data/unit/check-typevar-values.test           |  505 +++
 test-data/unit/check-underscores.test              |   16 +
 test-data/unit/check-unions.test                   |  219 ++
 test-data/unit/check-unreachable-code.test         |  459 +++
 test-data/unit/check-unsupported.test              |   15 +
 test-data/unit/check-varargs.test                  |  592 ++++
 test-data/unit/check-warnings.test                 |  132 +
 test-data/unit/cmdline.test                        |  479 +++
 test-data/unit/fixtures/__new__.pyi                |   14 +
 test-data/unit/fixtures/alias.pyi                  |   12 +
 test-data/unit/fixtures/args.pyi                   |   29 +
 test-data/unit/fixtures/async_await.pyi            |    9 +
 test-data/unit/fixtures/bool.pyi                   |   15 +
 test-data/unit/fixtures/callable.pyi               |   26 +
 test-data/unit/fixtures/classmethod.pyi            |   22 +
 test-data/unit/fixtures/complex.pyi                |   11 +
 test-data/unit/fixtures/dict.pyi                   |   35 +
 test-data/unit/fixtures/exception.pyi              |   13 +
 test-data/unit/fixtures/for.pyi                    |   19 +
 test-data/unit/fixtures/function.pyi               |   10 +
 test-data/unit/fixtures/isinstance.pyi             |   22 +
 test-data/unit/fixtures/isinstancelist.pyi         |   44 +
 test-data/unit/fixtures/list.pyi                   |   30 +
 test-data/unit/fixtures/module.pyi                 |   18 +
 test-data/unit/fixtures/module_all.pyi             |   15 +
 test-data/unit/fixtures/module_all_python2.pyi     |   16 +
 test-data/unit/fixtures/ops.pyi                    |   58 +
 test-data/unit/fixtures/primitives.pyi             |   17 +
 test-data/unit/fixtures/property.pyi               |   17 +
 test-data/unit/fixtures/python2.pyi                |   18 +
 test-data/unit/fixtures/set.pyi                    |   21 +
 test-data/unit/fixtures/slice.pyi                  |   13 +
 test-data/unit/fixtures/staticmethod.pyi           |   19 +
 test-data/unit/fixtures/transform.pyi              |   30 +
 test-data/unit/fixtures/tuple-simple.pyi           |   20 +
 test-data/unit/fixtures/tuple.pyi                  |   29 +
 test-data/unit/fixtures/union.pyi                  |   18 +
 test-data/unit/lib-stub/__builtin__.pyi            |   27 +
 test-data/unit/lib-stub/abc.pyi                    |    3 +
 test-data/unit/lib-stub/builtins.pyi               |   23 +
 test-data/unit/lib-stub/collections.pyi            |    3 +
 test-data/unit/lib-stub/mypy_extensions.pyi        |    6 +
 test-data/unit/lib-stub/sys.pyi                    |    2 +
 test-data/unit/lib-stub/types.pyi                  |    4 +
 test-data/unit/lib-stub/typing.pyi                 |   90 +
 test-data/unit/parse-errors.test                   |  448 +++
 test-data/unit/parse-python2.test                  |  399 +++
 test-data/unit/parse.test                          | 3386 ++++++++++++++++++++
 test-data/unit/python2eval.test                    |  474 +++
 test-data/unit/pythoneval-asyncio.test             |  486 +++
 test-data/unit/pythoneval-enum.test                |  134 +
 test-data/unit/pythoneval.test                     | 1214 +++++++
 test-data/unit/semanal-abstractclasses.test        |  119 +
 test-data/unit/semanal-basic.test                  |  459 +++
 test-data/unit/semanal-classes.test                |  623 ++++
 test-data/unit/semanal-errors.test                 | 1336 ++++++++
 test-data/unit/semanal-expressions.test            |  395 +++
 test-data/unit/semanal-modules.test                |  877 +++++
 test-data/unit/semanal-namedtuple.test             |  177 +
 test-data/unit/semanal-python2.test                |   76 +
 test-data/unit/semanal-statements.test             |  929 ++++++
 test-data/unit/semanal-symtable.test               |   52 +
 test-data/unit/semanal-typealiases.test            |  440 +++
 test-data/unit/semanal-typeddict.test              |   81 +
 test-data/unit/semanal-typeinfo.test               |   80 +
 test-data/unit/semanal-types.test                  | 1465 +++++++++
 test-data/unit/stubgen.test                        |  565 ++++
 test-data/unit/typexport-basic.test                | 1159 +++++++
 test-requirements.txt                              |    9 +
 tmp-test-dirs/.gitignore                           |    4 +
 typeshed/stdlib/2/UserDict.pyi                     |   10 +-
 typeshed/stdlib/2/__builtin__.pyi                  |   40 +-
 typeshed/stdlib/2/ast.pyi                          |    2 +-
 typeshed/stdlib/2/builtins.pyi                     |   40 +-
 typeshed/stdlib/2/codecs.pyi                       |   10 +-
 typeshed/stdlib/2/collections.pyi                  |   16 +-
 typeshed/stdlib/2/datetime.pyi                     |    8 +-
 typeshed/stdlib/2/decimal.pyi                      |    2 +-
 typeshed/stdlib/2/sqlite3/dbapi2.pyi               |   13 +-
 typeshed/stdlib/2/types.pyi                        |    7 +-
 typeshed/stdlib/2/typing.pyi                       |   24 +-
 typeshed/stdlib/2/unittest.pyi                     |   12 +-
 typeshed/stdlib/2/wsgiref/types.pyi                |   34 +
 typeshed/stdlib/2/xml/etree/ElementTree.pyi        |  116 -
 typeshed/stdlib/2and3/argparse.pyi                 |  116 +-
 typeshed/stdlib/2and3/logging/__init__.pyi         |    2 +-
 typeshed/stdlib/{2 => 2and3}/syslog.pyi            |    6 +
 .../{2 => 2and3}/xml/etree/ElementInclude.pyi      |    2 -
 .../stdlib/{2 => 2and3}/xml/etree/ElementPath.pyi  |    2 -
 .../{3.2 => 2and3}/xml/etree/ElementTree.pyi       |   48 +-
 .../{3.4/xml => 2and3/xml/etree}/__init__.pyi      |    0
 .../stdlib/{2 => 2and3}/xml/etree/cElementTree.pyi |    2 -
 typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi   |   19 -
 typeshed/stdlib/3.2/xml/etree/ElementPath.pyi      |   35 -
 typeshed/stdlib/3.2/xml/etree/cElementTree.pyi     |    5 -
 typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi   |   19 -
 typeshed/stdlib/3.3/xml/etree/ElementPath.pyi      |   35 -
 typeshed/stdlib/3.3/xml/etree/ElementTree.pyi      |  113 -
 typeshed/stdlib/3.3/xml/etree/cElementTree.pyi     |    5 -
 typeshed/stdlib/3.4/asyncio/__init__.pyi           |    4 +-
 typeshed/stdlib/3.4/asyncio/queues.pyi             |   15 +-
 typeshed/stdlib/3.4/asyncio/streams.pyi            |    5 +-
 typeshed/stdlib/3.4/asyncio/subprocess.pyi         |    4 +-
 typeshed/stdlib/3.4/asyncio/tasks.pyi              |   16 +-
 typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi   |   19 -
 typeshed/stdlib/3.4/xml/etree/ElementPath.pyi      |   35 -
 typeshed/stdlib/3.4/xml/etree/ElementTree.pyi      |  118 -
 typeshed/stdlib/3.4/xml/etree/cElementTree.pyi     |    5 -
 typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi   |   19 -
 typeshed/stdlib/3.5/xml/etree/ElementPath.pyi      |   35 -
 typeshed/stdlib/3.5/xml/etree/ElementTree.pyi      |  118 -
 typeshed/stdlib/3.5/xml/etree/cElementTree.pyi     |    5 -
 typeshed/stdlib/3/ast.pyi                          |    2 +-
 typeshed/stdlib/3/builtins.pyi                     |   31 +-
 typeshed/stdlib/3/calendar.pyi                     |    7 +-
 typeshed/stdlib/3/codecs.pyi                       |    8 +-
 typeshed/stdlib/3/collections/__init__.pyi         |   24 +-
 typeshed/stdlib/3/collections/abc.pyi              |    1 +
 typeshed/stdlib/3/decimal.pyi                      |    1 -
 typeshed/stdlib/3/itertools.pyi                    |    2 +-
 typeshed/stdlib/3/os/__init__.pyi                  |    2 +
 typeshed/stdlib/3/sqlite3/dbapi2.pyi               |    1 -
 typeshed/stdlib/3/subprocess.pyi                   |    6 +-
 typeshed/stdlib/3/sys.pyi                          |    5 +
 typeshed/stdlib/3/types.pyi                        |    2 +
 typeshed/stdlib/3/typing.pyi                       |   40 +-
 typeshed/stdlib/3/urllib/parse.pyi                 |   21 +-
 typeshed/stdlib/3/wsgiref/types.pyi                |   33 +
 typeshed/stdlib/3/xml/etree/ElementInclude.pyi     |   19 -
 typeshed/stdlib/3/xml/etree/ElementPath.pyi        |   25 -
 typeshed/stdlib/3/xml/etree/ElementTree.pyi        |   98 -
 typeshed/stdlib/3/xml/etree/cElementTree.pyi       |    5 -
 typeshed/third_party/2/OpenSSL/crypto.pyi          |  187 +-
 .../2/cryptography}/__init__.pyi                   |    0
 .../2/cryptography/hazmat}/__init__.pyi            |    0
 .../2/cryptography/hazmat/primitives}/__init__.pyi |    0
 .../hazmat/primitives/asymmetric}/__init__.pyi     |    0
 .../hazmat/primitives/asymmetric/dsa.pyi           |    4 +
 .../hazmat/primitives/asymmetric/rsa.pyi           |    4 +
 .../hazmat/primitives/serialization.pyi            |   32 +
 typeshed/third_party/2/dateutil/tz/__init__.pyi    |    5 +
 typeshed/third_party/2/dateutil/tz/_common.pyi     |   28 +
 typeshed/third_party/2/dateutil/tz/tz.pyi          |   85 +
 typeshed/third_party/2/requests/api.pyi            |   19 +-
 typeshed/third_party/2/six/__init__.pyi            |    2 +-
 typeshed/third_party/2and3/pymysql/__init__.pyi    |   39 +
 typeshed/third_party/2and3/pymysql/charset.pyi     |   20 +
 typeshed/third_party/2and3/pymysql/connections.pyi |  142 +
 .../third_party/2and3/pymysql/constants/CLIENT.pyi |   24 +
 .../2and3/pymysql/constants/COMMAND.pyi            |   28 +
 .../third_party/2and3/pymysql/constants/ER.pyi     |  477 +++
 .../2and3/pymysql/constants/FIELD_TYPE.pyi         |   35 +
 .../third_party/2and3/pymysql/constants/FLAG.pyi   |   21 +
 .../2and3/pymysql/constants/SERVER_STATUS.pyi      |   16 +
 .../2and3/pymysql/constants/__init__.pyi           |    3 +
 typeshed/third_party/2and3/pymysql/converters.pyi  |   50 +
 typeshed/third_party/2and3/pymysql/cursors.pyi     |   37 +
 typeshed/third_party/2and3/pymysql/err.pyi         |   22 +
 typeshed/third_party/2and3/pymysql/times.pyi       |   14 +
 typeshed/third_party/2and3/pymysql/util.pyi        |    7 +
 .../{2 => 2and3}/sqlalchemy/__init__.pyi           |    0
 .../{2 => 2and3}/sqlalchemy/databases/__init__.pyi |    0
 .../{2 => 2and3}/sqlalchemy/databases/mysql.pyi    |    0
 .../{2 => 2and3}/sqlalchemy/dialects/__init__.pyi  |    0
 .../sqlalchemy/dialects/mysql/__init__.pyi         |    0
 .../sqlalchemy/dialects/mysql/base.pyi             |    0
 .../{2 => 2and3}/sqlalchemy/engine/__init__.pyi    |    0
 .../{2 => 2and3}/sqlalchemy/engine/base.pyi        |    0
 .../{2 => 2and3}/sqlalchemy/engine/strategies.pyi  |    0
 .../{2 => 2and3}/sqlalchemy/engine/url.pyi         |    0
 .../third_party/{2 => 2and3}/sqlalchemy/exc.pyi    |    0
 .../{2 => 2and3}/sqlalchemy/inspection.pyi         |    0
 .../third_party/{2 => 2and3}/sqlalchemy/log.pyi    |    0
 .../{2 => 2and3}/sqlalchemy/orm/__init__.pyi       |    6 +-
 .../{2 => 2and3}/sqlalchemy/orm/session.pyi        |    2 +-
 typeshed/third_party/2and3/sqlalchemy/orm/util.pyi |   12 +
 .../third_party/{2 => 2and3}/sqlalchemy/pool.pyi   |    0
 .../third_party/{2 => 2and3}/sqlalchemy/schema.pyi |    0
 .../{2 => 2and3}/sqlalchemy/sql/__init__.pyi       |    0
 .../{2 => 2and3}/sqlalchemy/sql/annotation.pyi     |    0
 .../{2 => 2and3}/sqlalchemy/sql/base.pyi           |    0
 .../{2 => 2and3}/sqlalchemy/sql/ddl.pyi            |    0
 .../{2 => 2and3}/sqlalchemy/sql/dml.pyi            |    0
 .../{2 => 2and3}/sqlalchemy/sql/elements.pyi       |   35 +-
 .../{2 => 2and3}/sqlalchemy/sql/expression.pyi     |    0
 .../{2 => 2and3}/sqlalchemy/sql/functions.pyi      |    0
 .../{2 => 2and3}/sqlalchemy/sql/naming.pyi         |    0
 .../{2 => 2and3}/sqlalchemy/sql/operators.pyi      |    0
 .../{2 => 2and3}/sqlalchemy/sql/schema.pyi         |    0
 .../{2 => 2and3}/sqlalchemy/sql/selectable.pyi     |    6 +-
 .../{2 => 2and3}/sqlalchemy/sql/sqltypes.pyi       |    0
 .../{2 => 2and3}/sqlalchemy/sql/type_api.pyi       |    0
 .../{2 => 2and3}/sqlalchemy/sql/visitors.pyi       |    0
 .../third_party/{2 => 2and3}/sqlalchemy/types.pyi  |    0
 .../{2 => 2and3}/sqlalchemy/util/__init__.pyi      |    0
 .../{2 => 2and3}/sqlalchemy/util/_collections.pyi  |    0
 .../{2 => 2and3}/sqlalchemy/util/compat.pyi        |    4 +-
 .../{2 => 2and3}/sqlalchemy/util/deprecations.pyi  |    0
 .../{2 => 2and3}/sqlalchemy/util/langhelpers.pyi   |    0
 typeshed/third_party/3/dateutil/tz/__init__.pyi    |    5 +
 typeshed/third_party/3/dateutil/tz/_common.pyi     |   28 +
 typeshed/third_party/3/dateutil/tz/tz.pyi          |   85 +
 typeshed/third_party/3/requests/api.pyi            |   16 +-
 426 files changed, 94677 insertions(+), 1718 deletions(-)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..731180e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,29 @@
+build/
+__pycache__
+*.py[cod]
+*~
+@*
+/build
+/env
+docs/build/
+*.iml
+/out/
+.venv/
+.mypy_cache/
+.incremental_checker_cache.json
+.cache
+
+# Packages
+*.egg
+*.egg-info
+
+# IDEs
+.idea
+*.swp
+
+# Operating Systems
+.DS_store
+
+# Coverage Files
+htmlcov
+.coverage*
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..6b366ad
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "typeshed"]
+	path = typeshed
+	url = http://github.com/python/typeshed
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..4c25d0e
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,20 @@
+sudo: false
+language: python
+python:
+  - "3.3"
+  - "3.4"
+  # Specifically request 3.5.1 because we need to be compatible with that.
+  - "3.5.1"
+  - "3.6"
+  - "3.7-dev"
+  # Pypy build is disabled because it doubles the travis build time, and it rarely fails
+  # unless one of the other builds fails.
+  # - "pypy3"
+
+install:
+  - pip install -r test-requirements.txt
+  - python setup.py install
+
+script:
+  - python runtests.py -x lint
+  - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..c01af3b
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,152 @@
+Contributing to Mypy
+====================
+
+Welcome!  Mypy is a community project that aims to work for a wide
+range of Python users and Python codebases.  If you're trying Mypy on
+your Python code, your experience and what you can contribute are
+important to the project's success.
+
+
+Getting started, building, and testing
+--------------------------------------
+
+If you haven't already, take a look at the project's
+[README.md file](README.md)
+and the [Mypy documentation](http://mypy.readthedocs.io/en/latest/),
+and try adding type annotations to your file and type-checking it with Mypy.
+
+
+Discussion
+----------
+
+If you've run into behavior in Mypy you don't understand, or you're
+having trouble working out a good way to apply it to your code, or
+you've found a bug or would like a feature it doesn't have, we want to
+hear from you!
+
+Our main forum for discussion is the project's [GitHub issue
+tracker](https://github.com/python/mypy/issues).  This is the right
+place to start a discussion of any of the above or most any other
+topic concerning the project.
+
+For less formal discussion we have a chat room on
+[gitter.im](https://gitter.im/python/mypy).  Some Mypy core developers
+are almost always present; feel free to find us there and we're happy
+to chat.  Substantive technical discussion will be directed to the
+issue tracker.
+
+(We also have an IRC channel, `#python-mypy` on irc.freenode.net.
+This is lightly used; we have mostly switched to the gitter room
+mentioned above.)
+
+#### Code of Conduct
+
+Everyone participating in the Mypy community, and in particular in our
+issue tracker, pull requests, and IRC channel, is expected to treat
+other people with respect and more generally to follow the guidelines
+articulated in the [Python Community Code of
+Conduct](https://www.python.org/psf/codeofconduct/).
+
+
+Submitting Changes
+------------------
+
+Even more excellent than a good bug report is a fix for a bug, or the
+implementation of a much-needed new feature. (*)  We'd love to have
+your contributions.
+
+(*) If your new feature will be a lot of work, we recommend talking to
+    us early -- see below.
+
+We use the usual GitHub pull-request flow, which may be familiar to
+you if you've contributed to other projects on GitHub.  For the mechanics,
+see [our git and GitHub workflow help page](https://github.com/python/mypy/wiki/Using-Git-And-GitHub),
+or [GitHub's own documentation](https://help.github.com/articles/using-pull-requests/).
+
+Anyone interested in Mypy may review your code.  One of the Mypy core
+developers will merge your pull request when they think it's ready.
+For every pull request, we aim to promptly either merge it or say why
+it's not yet ready; if you go a few days without a reply, please feel
+free to ping the thread by adding a new comment.
+
+At present the core developers are (alphabetically):
+* David Fisher (@ddfisher)
+* Jukka Lehtosalo (@JukkaL)
+* Greg Price (@gnprice)
+* Guido van Rossum (@gvanrossum)
+
+
+Preparing Changes
+-----------------
+
+Before you begin: if your change will be a significant amount of work
+to write, we highly recommend starting by opening an issue laying out
+what you want to do.  That lets a conversation happen early in case
+other contributors disagree with what you'd like to do or have ideas
+that will help you do it.
+
+The best pull requests are focused, clearly describe what they're for
+and why they're correct, and contain tests for whatever changes they
+make to the code's behavior.  As a bonus these are easiest for someone
+to review, which helps your pull request get merged quickly!  Standard
+advice about good pull requests for open-source projects applies; we
+have [our own writeup](https://github.com/python/mypy/wiki/Good-Pull-Request)
+of this advice.
+
+See also our [coding conventions](https://github.com/python/mypy/wiki/Code-Conventions) --
+which consist mainly of a reference to
+[PEP 8](https://www.python.org/dev/peps/pep-0008/) -- for the code you
+put in the pull request.
+
+You may also find other pages in the
+[Mypy developer guide](https://github.com/python/mypy/wiki/Developer-Guides)
+helpful in developing your change.
+
+
+Issue-tracker conventions
+-------------------------
+
+We aim to reply to all new issues promptly.  We'll assign a milestone
+to help us track which issues we intend to get to when, and may apply
+labels to carry some other information.  Here's what our milestones
+and labels mean.
+
+### Task priority and sizing
+
+We use GitHub "labels" ([see our
+list](https://github.com/python/mypy/labels)) to roughly order what we
+want to do soon and less soon.  There's two dimensions taken into
+account: **priority** (does it matter to our users) and **size** (how
+long will it take to complete).
+
+Bugs that aren't a huge deal but do matter to users and don't seem
+like a lot of work to fix generally will be dealt with sooner; things
+that will take longer may go further out.
+
+We are trying to keep the backlog at a manageable size; an issue that is
+unlikely to be acted upon in the foreseeable future is going to be
+respectfully closed.  This doesn't mean the issue is not important, but
+rather reflects the limits of the team.
+
+The **question** label is for issue threads where a user is asking a
+question but it isn't yet clear that it represents something to actually
+change.  We use the issue tracker as the preferred venue for such
+questions, even when they aren't literally issues, to keep down the
+number of distinct discussion venues anyone needs to track.  These might
+evolve into a bug or feature request.
+
+Issues **without a priority or size** haven't been triaged.  We aim to
+triage all new issues promptly, but there are some issues from previous
+years that we haven't yet re-reviewed since adopting these conventions.
+
+### Other labels
+
+* **needs discussion**: This issue needs agreement on some kind of
+  design before it makes sense to implement it, and it either doesn't
+  yet have a design or doesn't yet have agreement on one.
+* **feature**, **bug**, **crash**, **refactoring**, **documentation**:
+  These classify the user-facing impact of the change.  Specifically
+  "refactoring" means there should be no user-facing effect.
+* **topic-** labels group issues touching a similar aspect of the
+  project, for example PEP 484 compatibility, a specific command-line
+  option or dependency.
diff --git a/CREDITS b/CREDITS
new file mode 100644
index 0000000..d4fe9ee
--- /dev/null
+++ b/CREDITS
@@ -0,0 +1,101 @@
+Credits
+-------
+
+Lead developer:
+
+  Jukka Lehtosalo <jukka.lehtosalo at iki.fi>
+
+Core team:
+
+  Guido <guido at dropbox.com>
+  David Fisher <ddfisher at dropbox.com>
+  Greg Price <gregprice at dropbox.com>
+
+Contributors (in alphabetical order, including typeshed):
+
+  Tim Abbott
+  Steven Allen (@Stebalien)
+  Della Anjeh
+  Reid Barton (@rwbarton)
+  Matthias Bussonnier
+  Anup Chenthamarakshan
+  Kyle Consalus
+  Ian Cordasco
+  ctcutler
+  Ben Darnell
+  Miguel Garcia (@rockneurotiko)
+  Mihnea Giurgea
+  Ryan Gonzalez (@kirbyfan64)
+  James Guthrie
+  Jared Hance
+  Ashley Hewson (@ashleyh)
+  icoxfog417
+  Bob Ippolito (@etrepum)
+  ismail-s
+  Sander Kersten (@spkersten)
+  Matthias Kramm
+  Ian Kronquist (@iankronquist)
+  Yuval Langer
+  Howard Lee
+  Tad Leonard
+  Li Haoyi
+  Darjus Loktevic
+  Ben Longbons
+  Florian Ludwig (@FlorianLudwig)
+  Robert T. McGibbon
+  Ron Murawski <ron at horizonchess.com>
+  Motoki Naruse
+  Jared Pochtar (@jaredp)
+  Michal Pokorný
+  Eric Price (@ecprice)
+  Brodie Rao
+  Sebastian Reuße
+  Sebastian Riikonen
+  Seo Sanghyeon
+  Marek Sapota
+  Gigi Sayfan
+  Vlad Shcherbina
+  Anders Schuller
+  Daniel Shaulov
+  David Shea
+  Vita Smid
+  Schuyler Smith
+  Marcell Vazquez-Chanlatte (@mvcisback)
+  Prayag Verma
+  Igor Vuk (@ivuk)
+  Jeff Walden (@jswalden)
+  Michael Walter
+  Jing Wang
+  Wen Zhang
+  Roy Williams
+  wizzardx
+  Matthew Wright
+  Yuanchao Zhu (@yczhu)
+  Gennadiy Zlobin (@gennad)
+
+Additional thanks to:
+
+  Alex Allain
+  Max Bolingbroke
+  Peter Calvert
+  Kannan Goundan
+  Kathy Gray
+  David J Greaves
+  Riitta Ikonen
+  Terho Ikonen
+  Stephen Kell
+  Łukasz Langa
+  Laura Lehtosalo
+  Peter Ludemann
+  Seppo Mattila
+  Robin Message
+  Alan Mycroft
+  Dominic Orchard
+  Pekka Rapinoja
+  Matt Robben
+  Satnam Singh
+  Juha Sorva
+  Clay Sweetser
+  Jorma Tarhio
+  Jussi Tuovila
+  Andrey Vlasovskikh
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..8145cc3
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,228 @@
+Mypy is licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2015-2016 Jukka Lehtosalo and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
+
+Portions of mypy are licensed under different licenses.  The files
+under stdlib-samples and lib-typing are licensed under the PSF 2
+License, reproduced below.
+
+= = = = =
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+= = = = =
\ No newline at end of file
diff --git a/PKG-INFO b/PKG-INFO
index c9cf741..236181d 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.470
+Version: 0.480.dev0
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..159038d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,310 @@
+Mypy: Optional Static Typing for Python
+=======================================
+
+[![Build Status](https://travis-ci.org/python/mypy.svg)](https://travis-ci.org/python/mypy)
+[![Chat at https://gitter.im/python/mypy](https://badges.gitter.im/python/mypy.svg)](https://gitter.im/python/mypy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+
+
+Got a question? File an issue!
+------------------------------
+
+We don't have a mailing list, but we are always happy to answer
+questions on [gitter chat](https://gitter.im/python/mypy) or filed as
+issues in our trackers:
+
+- [mypy tracker](https://github.com/python/mypy/issues)
+  for mypy issues
+- [typeshed tracker](https://github.com/python/typeshed/issues)
+  for issues with specific modules
+- [typing tracker](https://github.com/python/typing/issues)
+  for discussion of new type system features (PEP 484 changes) and
+  runtime bugs in the typing module
+
+What is mypy?
+-------------
+
+Mypy is an optional static type checker for Python.  You can add type
+hints to your Python programs using the standard syntax for type
+annotations introduced in Python 3.5 ([PEP 484](https://www.python.org/dev/peps/pep-0484/)), and use mypy to
+type check them statically. Find bugs in your programs without even
+running them!
+
+The type annotation standard has also been backported to earlier
+Python 3.x versions.  Mypy supports Python 3.3 and later.
+
+For Python 2.7, you can add annotations as comments (this is also
+specified in [PEP 484](https://www.python.org/dev/peps/pep-0484/)).
+
+You can mix dynamic and static typing in your programs. You can always
+fall back to dynamic typing when static typing is not convenient, such
+as for legacy code.
+
+Here is a small example to whet your appetite:
+
+```python
+from typing import Iterator
+
+def fib(n: int) -> Iterator[int]:
+    a, b = 0, 1
+    while a < n:
+        yield a
+        a, b = b, a + b
+```
+
+Mypy is in development; some features are missing and there are bugs.
+See 'Development status' below.
+
+
+Requirements
+------------
+
+You need Python 3.3 or later to run mypy.  You can have multiple Python
+versions (2.x and 3.x) installed on the same system without problems.
+
+In Ubuntu, Mint and Debian you can install Python 3 like this:
+
+    $ sudo apt-get install python3 python3-pip
+
+For other Linux flavors, OS X and Windows, packages are available at
+
+  http://www.python.org/getit/
+
+
+Quick start
+-----------
+
+Mypy can be installed using pip:
+
+    $ python3 -m pip install -U mypy
+
+If you want to run the latest version of the code, you can install from git:
+
+    $ python3 -m pip install -U git+git://github.com/python/mypy.git
+
+
+Now, if Python on your system is configured properly (else see
+"Troubleshooting" below), you can type-check the [statically typed parts] of a
+program like this:
+
+    $ mypy PROGRAM
+
+You can always use a Python interpreter to run your statically typed
+programs, even if they have type errors:
+
+    $ python3 PROGRAM
+
+[statically typed parts]: http://mypy.readthedocs.io/en/latest/basics.html#function-signatures
+
+
+Web site and documentation
+--------------------------
+
+Documentation and additional information is available at the web site:
+
+  http://www.mypy-lang.org/
+
+Or you can jump straight to the documentation:
+
+  http://mypy.readthedocs.io/
+
+
+Troubleshooting
+---------------
+
+Depending on your configuration, you may have to run `pip3` like
+this:
+
+    $ python3 -m pip install -U mypy
+
+Except on Windows, it's best to always use the `--fast-parser`
+option to mypy; this requires installing `typed-ast`:
+
+    $ python3 -m pip install -U typed-ast
+
+If the `mypy` command isn't found after installation: After either
+`pip3 install` or `setup.py install`, the `mypy` script and
+dependencies, including the `typing` module, will be installed to
+system-dependent locations.  Sometimes the script directory will not
+be in `PATH`, and you have to add the target directory to `PATH`
+manually or create a symbolic link to the script.  In particular, on
+Mac OS X, the script may be installed under `/Library/Frameworks`:
+
+    /Library/Frameworks/Python.framework/Versions/<version>/bin
+
+In Windows, the script is generally installed in
+`\PythonNN\Scripts`. So, type check a program like this (replace
+`\Python34` with your Python installation path):
+
+    C:\>\Python34\python \Python34\Scripts\mypy PROGRAM
+
+### Working with `virtualenv`
+
+If you are using [`virtualenv`](https://virtualenv.pypa.io/en/stable/),
+make sure you are running a python3 environment. Installing via `pip3`
+in a v2 environment will not configure the environment to run installed
+modules from the command line.
+
+    $ python3 -m pip install -U virtualenv
+    $ python3 -m virtualenv env
+
+
+Quick start for contributing to mypy
+------------------------------------
+
+If you want to contribute, first clone the mypy git repository:
+
+    $ git clone --recurse-submodules https://github.com/python/mypy.git
+
+From the mypy directory, use pip to install mypy:
+
+    $ cd mypy
+    $ python3 -m pip install -U .
+
+Replace `python3` with your Python 3 interpreter.  You may have to do
+the above as root. For example, in Ubuntu:
+
+    $ sudo python3 -m pip install -U .
+
+Now you can use the `mypy` program just as above.  In case of trouble
+see "Troubleshooting" above.
+
+The mypy wiki contains some useful information for contributors:
+
+  https://github.com/python/mypy/wiki/Developer-Guides
+
+Working with the git version of mypy
+------------------------------------
+
+mypy contains a submodule, "typeshed". See http://github.com/python/typeshed.
+This submodule contains types for the Python standard library.
+
+Due to the way git submodules work, you'll have to do
+```
+  git submodule update typeshed
+```
+whenever you change branches, merge, rebase, or pull.
+
+(It's possible to automate this: Search Google for "git hook update submodule")
+
+Running tests and linting
+-------------------------
+
+First install any additional dependencies needed for testing:
+
+    $ python3 -m pip install -U -r test-requirements.txt
+
+To run all tests, run the script `runtests.py` in the mypy repository:
+
+    $ ./runtests.py
+
+Note that some tests will be disabled for older python versions.
+
+This will run all tests, including integration and regression tests,
+and will type check mypy and verify that all stubs are valid.
+
+You can run a subset of test suites by passing positive or negative
+filters:
+
+    $ ./runtests.py lex parse -x lint -x stub
+
+For example, to run unit tests only, which run pretty quickly:
+
+    $ ./runtests.py unit-test pytest
+
+The unit test suites are driven by a mixture of test frameworks:
+mypy's own `myunit` framework, and `pytest`, which we're in the
+process of migrating to.  For finer control over which unit tests are
+run and how, you can run `py.test` or `scripts/myunit` directly, or
+pass inferior arguments via `-a`:
+
+    $ py.test mypy/test/testcheck.py -v -k MethodCall
+    $ ./runtests.py -v 'pytest mypy/test/testcheck' -a -v -a -k -a MethodCall
+
+    $ PYTHONPATH=$PWD scripts/myunit -m mypy.test.testlex -v '*backslash*'
+    $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*'
+
+You can also run the type checker for manual testing without
+installing anything by setting up the Python module search path
+suitably (the lib-typing/3.2 path entry is not needed for Python 3.5
+or when you have manually installed the `typing` module):
+
+    $ export PYTHONPATH=$PWD:$PWD/lib-typing/3.2
+    $ python<version> -m mypy PROGRAM.py
+
+You can add the entry scripts to PATH for a single python3 version:
+
+    $ export PATH=$PWD/scripts
+    $ mypy PROGRAM.py
+
+You can check a module or string instead of a file:
+
+    $ mypy PROGRAM.py
+    $ mypy -m MODULE
+    $ mypy -c 'import MODULE'
+
+To run the linter:
+
+    $ ./runtests.py lint
+
+
+Coverage reports
+----------------
+
+There is an experimental feature to generate coverage reports.  To use
+this feature, you need to `pip install -U lxml`.  This is an extension
+module and requires various library headers to install; on a
+Debian-derived system the command
+  `apt-get install python3-dev libxml2-dev libxslt1-dev`
+may provide the necessary dependencies.
+
+To use the feature, pass e.g. `--txt-report "$(mktemp -d)"`.
+
+
+Development status
+------------------
+
+Mypy is work in progress and is not yet production quality, though
+mypy development has been done using mypy for a while!
+
+Here are some of the more significant Python features not supported
+right now (but all of these will improve):
+
+ - properties with setters not supported
+ - limited metaclass support
+ - only a subset of Python standard library modules are supported, and some
+   only partially
+ - 3rd party module support is limited
+
+The current development focus is to have a good coverage of Python
+features and the standard library (both 3.x and 2.7).
+
+
+Issue tracker
+-------------
+
+Please report any bugs and enhancement ideas using the mypy issue
+tracker:
+
+  https://github.com/python/mypy/issues
+
+Feel free to also ask questions on the tracker.
+
+
+Help wanted
+-----------
+
+Any help in testing, development, documentation and other tasks is
+highly appreciated and useful to the project. There are tasks for
+contributors of all experience levels. If you're just getting started,
+check out the
+[difficulty/easy](https://github.com/python/mypy/labels/difficulty%2Feasy)
+label.
+
+For more details, see the file [CONTRIBUTING.md](CONTRIBUTING.md).
+
+
+License
+-------
+
+Mypy is licensed under the terms of the MIT License (see the file
+LICENSE).
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 0000000..80fd0f8
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,32 @@
+environment:
+  matrix:
+
+    - PYTHON: "C:\\Python35"
+      PYTHON_VERSION: "3.5.1"
+      PYTHON_ARCH: "32"
+      
+    - PYTHON: "C:\\Python35-x64"
+      PYTHON_VERSION: "3.5.1"
+      PYTHON_ARCH: "64"
+    
+    - PYTHON: "C:\\Python36"
+      PYTHON_VERSION: "3.6.x"
+      PYTHON_ARCH: "32"
+      
+    - PYTHON: "C:\\Python36-x64"
+      PYTHON_VERSION: "3.6.x"
+      PYTHON_ARCH: "64"
+     
+     
+install:
+    - "%PYTHON%\\python.exe -m pip install -r test-requirements.txt"
+    - "git submodule update --init typeshed"
+    - "%PYTHON%\\python.exe setup.py -q install"
+   
+build: off
+
+test_script:
+    # Ignore lint (it's run separately below), reports (since we don't have lxml),
+    # and cmdline (since one of its tests depend on lxml)
+    - "%PYTHON%\\python.exe runtests.py -x lint -x reports -x cmdline"
+    - ps: if ($env:PYTHON_VERSION -Match "3.6.x" -And $env:PYTHON_ARCH -Match "64") { iex "$env:PYTHON\\python.exe -m flake8" }
diff --git a/build-requirements.txt b/build-requirements.txt
new file mode 100644
index 0000000..0a8547b
--- /dev/null
+++ b/build-requirements.txt
@@ -0,0 +1,2 @@
+setuptools
+wheel
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..9673db2
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,3 @@
+pytest_plugins = [
+    'mypy.test.data',
+]
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..be69e9d
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mypy.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mypy.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/Mypy"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mypy"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000..2122eef
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,49 @@
+Mypy Documentation
+==================
+
+What's this?
+------------
+
+This directory contains the source code for Mypy documentation (under `source/`)
+and build scripts. The documentation uses Sphinx and reStructuredText. We use
+`sphinx-rtd-theme` as the documentation theme.
+
+Building the documentation
+--------------------------
+
+Install Sphinx and other dependencies (i.e. theme) needed for the documentation.
+From the `docs` directory, use `pip`:
+
+```
+$ pip install -r requirements-docs.txt
+```
+
+Build the documentation like this:
+
+```
+$ make html
+```
+
+The built documentation will be placed in the `docs/build` directory. Open
+`docs/build/index.html` to view the documentation.
+
+Helpful documentation build commands
+------------------------------------
+
+Clean the documentation build:
+
+```
+$ make clean
+```
+
+Test and check the links found in the documentation:
+
+```
+$ make linkcheck
+```
+
+Documentation on Read The Docs
+------------------------------
+
+The mypy documentation is hosted on Read The Docs, and the latest version
+can be found at https://mypy.readthedocs.io/en/latest.
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100755
index 0000000..1e3d843
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,242 @@
+ at ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
+set I18NSPHINXOPTS=%SPHINXOPTS% source
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  texinfo    to make Texinfo files
+	echo.  gettext    to make PO message catalogs
+	echo.  changes    to make an overview over all changed/added/deprecated items
+	echo.  xml        to make Docutils-native XML files
+	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Mypy.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Mypy.ghc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "latexpdf" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	cd %BUILDDIR%/latex
+	make all-pdf
+	cd %BUILDDIR%/..
+	echo.
+	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "latexpdfja" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	cd %BUILDDIR%/latex
+	make all-pdf-ja
+	cd %BUILDDIR%/..
+	echo.
+	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "texinfo" (
+	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+	goto end
+)
+
+if "%1" == "gettext" (
+	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+if "%1" == "xml" (
+	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The XML files are in %BUILDDIR%/xml.
+	goto end
+)
+
+if "%1" == "pseudoxml" (
+	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+	goto end
+)
+
+:end
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
new file mode 100644
index 0000000..d20641e
--- /dev/null
+++ b/docs/requirements-docs.txt
@@ -0,0 +1,2 @@
+Sphinx >= 1.4.4
+sphinx-rtd-theme >= 0.1.9
diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst
new file mode 100644
index 0000000..b9dd07f
--- /dev/null
+++ b/docs/source/additional_features.rst
@@ -0,0 +1,9 @@
+Additional features
+-------------------
+
+Several mypy features are not currently covered by this tutorial,
+including the following:
+
+- inheritance between generic classes
+- compatibility and subtyping of generic types, including covariance of generic types
+- ``super()``
diff --git a/docs/source/basics.rst b/docs/source/basics.rst
new file mode 100644
index 0000000..572364d
--- /dev/null
+++ b/docs/source/basics.rst
@@ -0,0 +1,194 @@
+Basics
+======
+
+This chapter introduces some core concepts of mypy, including function
+annotations, the ``typing`` module and library stubs. Read it carefully,
+as the rest of the documentation may not make much sense otherwise.
+
+Function signatures
+*******************
+
+A function without a type annotation is considered dynamically typed:
+
+.. code-block:: python
+
+   def greeting(name):
+       return 'Hello, {}'.format(name)
+
+You can declare the signature of a function using the Python 3
+annotation syntax (Python 2 is discussed later in :ref:`python2`).
+This makes the function statically typed, and that causes the type
+checker to report type errors within the function.
+
+Here's a version of the above function that is statically typed and
+will be type checked:
+
+.. code-block:: python
+
+   def greeting(name: str) -> str:
+       return 'Hello, {}'.format(name)
+
+If a function does not explicitly return a value we give the return
+type as ``None``. Using a ``None`` result in a statically typed
+context results in a type check error:
+
+.. code-block:: python
+
+   def p() -> None:
+       print('hello')
+
+   a = p()   # Type check error: p has None return value
+
+Arguments with default values can be annotated as follows:
+
+.. code-block:: python
+
+   def greeting(name: str, prefix: str = 'Mr.') -> str:
+      return 'Hello, {} {}'.format(prefix, name)
+
+Mixing dynamic and static typing
+********************************
+
+Mixing dynamic and static typing within a single file is often
+useful. For example, if you are migrating existing Python code to
+static typing, it may be easiest to do this incrementally, such as by
+migrating a few functions at a time. Also, when prototyping a new
+feature, you may decide to first implement the relevant code using
+dynamic typing and only add type signatures later, when the code is
+more stable.
+
+.. code-block:: python
+
+   def f():
+       1 + 'x'  # No static type error (dynamically typed)
+
+   def g() -> None:
+       1 + 'x'  # Type check error (statically typed)
+
+.. note::
+
+   The earlier stages of mypy, known as the semantic analysis, may
+   report errors even for dynamically typed functions. However, you
+   should not rely on this, as this may change in the future.
+
+The typing module
+*****************
+
+The ``typing`` module contains many definitions that are useful in
+statically typed code. You typically use ``from ... import`` to import
+them (we'll explain ``Iterable`` later in this document):
+
+.. code-block:: python
+
+   from typing import Iterable
+
+   def greet_all(names: Iterable[str]) -> None:
+       for name in names:
+           print('Hello, {}'.format(name))
+
+For brevity, we often omit the ``typing`` import in code examples, but
+you should always include it in modules that contain statically typed
+code.
+
+The presence or absence of the ``typing`` module does not affect
+whether your code is type checked; it is only required when you use
+one or more special features it defines.
+
+Type checking programs
+**********************
+
+You can type check a program by using the ``mypy`` tool, which is
+basically a linter -- it checks your program for errors without actually
+running it::
+
+   $ mypy program.py
+
+All errors reported by mypy are essentially warnings that you are free
+to ignore, if you so wish.
+
+The next chapter explains how to download and install mypy:
+:ref:`getting-started`.
+
+More command line options are documented in :ref:`command-line`.
+
+.. note::
+
+   Depending on how mypy is configured, you may have to explicitly use
+   the Python 3 interpreter to run mypy. The mypy tool is an ordinary
+   mypy (and so also Python) program. For example::
+
+     $ python3 -m mypy program.py
+
+.. _library-stubs:
+
+Library stubs and the Typeshed repo
+***********************************
+
+In order to type check code that uses library modules such as those
+included in the Python standard library, you need to have library
+*stubs*. A library stub defines a skeleton of the public interface
+of the library, including classes, variables and functions and
+their types, but dummy function bodies.
+
+For example, consider this code:
+
+.. code-block:: python
+
+  x = chr(4)
+
+Without a library stub, the type checker would have no way of
+inferring the type of ``x`` and checking that the argument to ``chr``
+has a valid type. Mypy incorporates the `typeshed
+<https://github.com/python/typeshed>`_ project, which contains library
+stubs for the Python builtins and the standard library. The stub for
+the builtins contains a definition like this for ``chr``:
+
+.. code-block:: python
+
+    def chr(code: int) -> str: ...
+
+In stub files we don't care about the function bodies, so we use 
+an ellipsis instead.  That ``...`` is three literal dots!
+
+Mypy complains if it can't find a stub (or a real module) for a
+library module that you import. You can create a stub easily; here is
+an overview:
+
+* Write a stub file for the library and store it as a ``.pyi`` file in
+  the same directory as the library module.
+* Alternatively, put your stubs (``.pyi`` files) in a directory
+  reserved for stubs (e.g., ``myproject/stubs``). In this case you
+  have to set the environment variable ``MYPYPATH`` to refer to the
+  directory.  For example::
+
+    $ export MYPYPATH=~/work/myproject/stubs
+
+Use the normal Python file name conventions for modules, e.g. ``csv.pyi``
+for module ``csv``. Use a subdirectory with ``__init__.pyi`` for packages.
+
+If a directory contains both a ``.py`` and a ``.pyi`` file for the
+same module, the ``.pyi`` file takes precedence. This way you can
+easily add annotations for a module even if you don't want to modify
+the source code. This can be useful, for example, if you use 3rd party
+open source libraries in your program (and there are no stubs in
+typeshed yet).
+
+That's it! Now you can access the module in mypy programs and type check
+code that uses the library. If you write a stub for a library module,
+consider making it available for other programmers that use mypy 
+by contributing it back to the typeshed repo.
+
+There is more information about creating stubs in the
+`mypy wiki <https://github.com/python/mypy/wiki/Creating-Stubs-For-Python-Modules>`_.
+The following sections explain the kinds of type annotations you can use
+in your programs and stub files.
+
+.. note::
+
+   You may be tempted to point ``MYPYPATH`` to the standard library or
+   to the ``site-packages`` directory where your 3rd party packages
+   are installed. This is almost always a bad idea -- you will likely
+   get tons of error messages about code you didn't write and that
+   mypy can't analyze all that well yet, and in the worst case
+   scenario mypy may crash due to some construct in a 3rd party
+   package that it didn't expect.
diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst
new file mode 100644
index 0000000..4426df7
--- /dev/null
+++ b/docs/source/builtin_types.rst
@@ -0,0 +1,37 @@
+Built-in types
+==============
+
+These are examples of some of the most common built-in types:
+
+=================== ===============================
+Type                Description
+=================== ===============================
+``int``             integer of arbitrary size
+``float``           floating point number
+``bool``            boolean value
+``str``             unicode string
+``bytes``           8-bit string
+``object``          an arbitrary object (``object`` is the common base class)
+``List[str]``       list of ``str`` objects
+``Dict[str, int]``  dictionary from ``str`` keys to ``int`` values
+``Iterable[int]``   iterable object containing ints
+``Sequence[bool]``  sequence of booleans
+``Any``             dynamically typed value with an arbitrary type
+=================== ===============================
+
+The type ``Any`` and type constructors ``List``, ``Dict``,
+``Iterable`` and ``Sequence`` are defined in the ``typing`` module.
+
+The type ``Dict`` is a *generic* class, signified by type arguments within
+``[...]``. For example, ``Dict[int, str]`` is a dictionary from integers to
+strings, and ``Dict[Any, Any]`` is a dictionary of dynamically typed
+(arbitrary) values and keys. ``List`` is another generic class. ``Dict`` and
+``List`` are aliases for the built-ins ``dict`` and ``list``, respectively.
+
+``Iterable`` and ``Sequence`` are generic abstract base classes that
+correspond to Python protocols. For example, a ``str`` object or a
+``List[str]`` object is valid
+when ``Iterable[str]`` or ``Sequence[str]`` is expected. Note that even though
+they are similar to abstract base classes defined in ``collections.abc``
+(formerly ``collections``), they are not identical, since the built-in
+collection type objects do not support indexing.
diff --git a/docs/source/casts.rst b/docs/source/casts.rst
new file mode 100644
index 0000000..900ee0c
--- /dev/null
+++ b/docs/source/casts.rst
@@ -0,0 +1,39 @@
+.. _casts:
+
+Casts
+=====
+
+Mypy supports type casts that are usually used to coerce a statically
+typed value to a subtype. Unlike languages such as Java or C#,
+however, mypy casts are only used as hints for the type checker, and they
+don't perform a runtime type check. Use the function ``cast`` to perform a
+cast:
+
+.. code-block:: python
+
+   from typing import cast, List
+
+   o = [1] # type: object
+   x = cast(List[int], o)  # OK
+   y = cast(List[str], o)  # OK (cast performs no actual runtime check)
+
+To support runtime checking of casts such as the above, we'd have to check
+the types of all list items, which would be very inefficient for large lists.
+Use assertions if you want to
+perform an actual runtime check. Casts are used to silence spurious
+type checker warnings and give the type checker a little help when it can't
+quite understand what is going on.
+
+You don't need a cast for expressions with type ``Any``, or when
+assigning to a variable with type ``Any``, as was explained earlier.
+You can also use ``Any`` as the cast target type -- this lets you perform
+any operations on the result. For example:
+
+.. code-block:: python
+
+    from typing import cast, Any
+
+    x = 1
+    x + 'x'   # Type check error
+    y = cast(Any, x)
+    y + 'x'   # Type check OK (runtime error)
diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst
new file mode 100644
index 0000000..b93a063
--- /dev/null
+++ b/docs/source/cheat_sheet.rst
@@ -0,0 +1,241 @@
+.. _cheat-sheet-py2:
+
+Mypy syntax cheat sheet (Python 2)
+==================================
+
+This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
+language represents various common types in Python 2.
+
+.. note::
+
+   Technically many of the type annotations shown below are redundant,
+   because mypy can derive them from the type of the expression.  So
+   many of the examples have a dual purpose: show how to write the
+   annotation, and show the inferred types.
+
+
+Built-in types
+**************
+
+.. code-block:: python
+
+   from typing import List, Set, Dict, Tuple, Text, Optional
+
+   # For simple built-in types, just use the name of the type.
+   x = 1 # type: int
+   x = 1.0 # type: float
+   x = True # type: bool
+   x = "test" # type: str
+   x = u"test" # type: unicode
+
+   # For collections, the name of the type is capitalized, and the
+   # name of the type inside the collection is in brackets.
+   x = [1] # type: List[int]
+   x = set([6, 7]) # type: Set[int]
+
+   # For mappings, we need the types of both keys and values.
+   x = dict(field=2.0) # type: Dict[str, float]
+
+   # For tuples, we specify the types of all the elements.
+   x = (3, "yes", 7.5) # type: Tuple[int, str, float]
+
+   # For textual data, use Text.
+   # This is `unicode` in Python 2 and `str` in Python 3.
+   x = ["string", u"unicode"] # type: List[Text]
+
+   # Use Optional for values that could be None.
+   input_str = f() # type: Optional[str]
+   if input_str is not None:
+      print input_str
+
+
+Functions
+*********
+
+.. code-block:: python
+
+   from typing import Callable, Iterable
+
+   # This is how you annotate a function definition.
+   def stringify(num):
+       # type: (int) -> str
+       """Your function docstring goes here after the type definition."""
+       return str(num)
+
+   # This function has no parameters and also returns nothing. Annotations
+   # can also be placed on the same line as their function headers.
+   def greet_world(): # type: () -> None
+       print "Hello, world!"
+
+   # And here's how you specify multiple arguments.
+   def plus(num1, num2):
+       # type: (int, int) -> int
+       return num1 + num2
+
+   # Add type annotations for kwargs as though they were positional args.
+   def f(num1, my_float=3.5):
+       # type: (int, float) -> float
+       return num1 + my_float
+
+   # An argument can be declared positional-only by giving it a name
+   # starting with two underscores:
+   def quux(__x):
+       # type: (int) -> None
+       pass
+   quux(3)  # Fine
+   quux(__x=3)  # Error
+
+   # This is how you annotate a function value.
+   x = f # type: Callable[[int, float], float]
+
+   # A generator function that yields ints is secretly just a function that
+   # returns an iterable (see below) of ints, so that's how we annotate it.
+   def f(n):
+       # type: (int) -> Iterable[int]
+       i = 0
+       while i < n:
+           yield i
+           i += 1
+
+   # There's alternative syntax for functions with many arguments.
+   def send_email(address,     # type: Union[str, List[str]]
+                  sender,      # type: str
+                  cc,          # type: Optional[List[str]]
+                  bcc,         # type: Optional[List[str]]
+                  subject='',
+                  body=None    # type: List[str]
+                  ):
+       # type: (...) -> bool
+        <code>
+
+
+When you're puzzled or when things are complicated
+**************************************************
+
+.. code-block:: python
+
+   from typing import Union, Any, cast
+
+   # To find out what type mypy infers for an expression anywhere in
+   # your program, wrap it in reveal_type.  Mypy will print an error
+   # message with the type; remove it again before running the code.
+   reveal_type(1) # -> error: Revealed type is 'builtins.int'
+
+   # Use Union when something could be one of a few types.
+   x = [3, 5, "test", "fun"] # type: List[Union[int, str]]
+
+   # Use Any if you don't know the type of something or it's too
+   # dynamic to write a type for.
+   x = mystery_function() # type: Any
+
+   # This is how to deal with varargs.
+   # This makes each positional arg and each keyword arg a 'str'.
+   def call(self, *args, **kwargs):
+            # type: (*str, **str) -> str
+            request = make_request(*args, **kwargs)
+            return self.do_api_query(request)
+
+   
+   # Use `ignore` to suppress type-checking on a given line, when your
+   # code confuses mypy or runs into an outright bug in mypy.
+   # Good practice is to comment every `ignore` with a bug link
+   # (in mypy, typeshed, or your own code) or an explanation of the issue.
+   x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167
+
+   # cast is a helper function for mypy that allows for guidance of how to convert types.
+   # it does not cast at runtime
+   a = [4]
+   b = cast(List[int], a)  # passes fine
+   c = cast(List[str], a)  # passes fine (no runtime check)
+   reveal_type(c)  # -> error: Revealed type is 'builtins.list[builtins.str]'
+   print(c)  # -> [4] the object is not cast
+
+   # TODO: explain "Need type annotation for variable" when
+   # initializing with None or an empty container
+
+
+Standard duck types
+*******************
+
+In typical Python code, many functions that can take a list or a dict
+as an argument only need their argument to be somehow "list-like" or
+"dict-like".  A specific meaning of "list-like" or "dict-like" (or
+something-else-like) is called a "duck type", and several duck types
+that are common in idiomatic Python are standardized.
+
+.. code-block:: python
+
+   from typing import Mapping, MutableMapping, Sequence, Iterable
+
+   # Use Iterable for generic iterables (anything usable in `for`),
+   # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
+   def f(iterable_of_ints):
+       # type: (Iterable[int]) -> List[str]
+       return [str(x) for x in iterable_of_ints]
+   f(range(1, 3))
+
+   # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
+   # and MutableMapping one (with `__setitem__`) that we might.
+   def f(my_dict):
+       # type: (Mapping[int, str]) -> List[int]
+       return list(my_dict.keys())
+   f({3: 'yes', 4: 'no'})
+   def f(my_mapping):
+       # type: (MutableMapping[int, str]) -> Set[str]
+       my_mapping[5] = 'maybe'
+       return set(my_mapping.values())
+   f({3: 'yes', 4: 'no'})
+
+
+Classes
+*******
+
+.. code-block:: python
+
+   class MyClass(object):
+
+       # For instance methods, omit `self`.
+       def my_method(self, num, str1):
+           # type: (int, str) -> str
+           return num * str1
+
+       # The __init__ method doesn't return anything, so it gets return
+       # type None just like any other method that doesn't return anything.
+       def __init__(self):
+           # type: () -> None
+           pass
+
+   # User-defined classes are written with just their own names.
+   x = MyClass() # type: MyClass
+
+
+Other stuff
+***********
+
+.. code-block:: python
+
+   import sys
+   # typing.Match describes regex matches from the re module.
+   from typing import Match, AnyStr, IO
+   x = re.match(r'[0-9]+', "15") # type: Match[str]
+
+   # Use AnyStr for functions that should accept any kind of string
+   # without allowing different kinds of strings to mix.
+   def concat(a, b):
+       # type: (AnyStr, AnyStr) -> AnyStr
+       return a + b
+   concat(u"foo", u"bar")  # type: unicode
+   concat(b"foo", b"bar")  # type: bytes
+
+   # Use IO[] for functions that should accept or return any
+   # object that comes from an open() call. The IO[] does not
+   # distinguish between reading, writing or other modes.
+   def get_sys_IO(mode='w'):
+       # type: (str) -> IO[str]
+       if mode == 'w':
+           return sys.stdout
+       elif mode == 'r':
+           return sys.stdin
+       else:
+           return sys.stdout
+
+   # TODO: add TypeVar and a simple generic function
+
diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
new file mode 100644
index 0000000..964e78e
--- /dev/null
+++ b/docs/source/cheat_sheet_py3.rst
@@ -0,0 +1,288 @@
+.. _cheat-sheet-py3:
+
+Mypy syntax cheat sheet (Python 3)
+==================================
+
+This document is a quick cheat sheet showing how the `PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ type
+language represents various common types in Python 3. Unless otherwise noted, the syntax is valid on all versions of Python 3.
+
+.. note::
+
+   Technically many of the type annotations shown below are redundant,
+   because mypy can derive them from the type of the expression.  So
+   many of the examples have a dual purpose: show how to write the
+   annotation, and show the inferred types.
+
+
+Built-in types
+**************
+
+.. code-block:: python
+
+   from typing import List, Set, Dict, Tuple, Text, Optional, AnyStr
+
+   # For simple built-in types, just use the name of the type.
+   x = 1  # type: int
+   x = 1.0  # type: float
+   x = True  # type: bool
+   x = "test"  # type: str
+   x = u"test"  # type: str
+   x = b"test"  # type: bytes
+
+   # For collections, the name of the type is capitalized, and the
+   # name of the type inside the collection is in brackets.
+   x = [1]  # type: List[int]
+   x = {6, 7}  # type: Set[int]
+
+   # For mappings, we need the types of both keys and values.
+   x = {'field': 2.0}  # type: Dict[str, float]
+
+   # For tuples, we specify the types of all the elements.
+   x = (3, "yes", 7.5)  # type: Tuple[int, str, float]
+
+   # For textual data, use Text.
+   # This is `unicode` in Python 2 and `str` in Python 3.
+   x = ["string", u"unicode"]  # type: List[Text]
+
+
+
+   # Use Optional for values that could be None.
+   input_str = f()  # type: Optional[str]
+   if input_str is not None:
+      print(input_str)
+
+
+Functions
+*********
+
+Python 3 introduces an annotation syntax for function declarations in `PEP 3107 <https://www.python.org/dev/peps/pep-3107/>`_.
+
+.. code-block:: python
+
+   from typing import Callable, Iterable, Union, Optional, List
+
+   # This is how you annotate a function definition.
+   def stringify(num: int) -> str:
+       return str(num)
+       
+   # And here's how you specify multiple arguments.
+   def plus(num1: int, num2: int) -> int:
+       return num1 + num2
+
+   # Add type annotations for kwargs as though they were positional args.
+   def f(num1: int, my_float: float = 3.5) -> float:
+       return num1 + my_float
+
+   # An argument can be declared positional-only by giving it a name
+   # starting with two underscores:
+   def quux(__x: int) -> None:
+       pass
+   quux(3)  # Fine
+   quux(__x=3)  # Error
+
+   # This is how you annotate a function value.
+   x = f # type: Callable[[int, float], float]
+
+   # A generator function that yields ints is secretly just a function that
+   # returns an iterable (see below) of ints, so that's how we annotate it.
+   def f(n: int) -> Iterable[int]:
+       i = 0
+       while i < n:
+           yield i
+           i += 1
+
+   # For a function with many arguments, you can of course split it over multiple lines
+   def send_email(address: Union[str, List[str]],
+                  sender: str,
+                  cc: Optional[List[str]],
+                  bcc: Optional[List[str]],
+                  subject='',
+                  body: Optional[List[str]] = None
+                  ) -> bool:
+       
+       ...
+
+
+When you're puzzled or when things are complicated
+**************************************************
+
+.. code-block:: python
+
+   from typing import Union, Any, List, cast
+
+   # To find out what type mypy infers for an expression anywhere in
+   # your program, wrap it in reveal_type.  Mypy will print an error
+   # message with the type; remove it again before running the code.
+   reveal_type(1)  # -> error: Revealed type is 'builtins.int'
+
+   # Use Union when something could be one of a few types.
+   x = [3, 5, "test", "fun"]  # type: List[Union[int, str]]
+
+   # Use Any if you don't know the type of something or it's too
+   # dynamic to write a type for.
+   x = mystery_function()  # type: Any
+
+   # Use `ignore` to suppress type-checking on a given line, when your
+   # code confuses mypy or runs into an outright bug in mypy.
+   # Good practice is to comment every `ignore` with a bug link
+   # (in mypy, typeshed, or your own code) or an explanation of the issue.
+   x = confusing_function()  # type: ignore # https://github.com/python/mypy/issues/1167
+
+   # cast is a helper function for mypy that allows for guidance of how to convert types.
+   # it does not cast at runtime
+   a = [4]
+   b = cast(List[int], a)  # passes fine
+   c = cast(List[str], a)  # passes fine (no runtime check)
+   reveal_type(c)  # -> error: Revealed type is 'builtins.list[builtins.str]'
+   print(c)  # -> [4] the object is not cast
+
+   # TODO: explain "Need type annotation for variable" when
+   # initializing with None or an empty container
+
+
+Standard duck types
+*******************
+
+In typical Python code, many functions that can take a list or a dict
+as an argument only need their argument to be somehow "list-like" or
+"dict-like".  A specific meaning of "list-like" or "dict-like" (or
+something-else-like) is called a "duck type", and several duck types
+that are common in idiomatic Python are standardized.
+
+.. code-block:: python
+
+   from typing import Mapping, MutableMapping, Sequence, Iterable, List, Set
+
+   # Use Iterable for generic iterables (anything usable in `for`),
+   # and Sequence where a sequence (supporting `len` and `__getitem__`) is required.
+   def f(iterable_of_ints: Iterable[int]) -> List[str]:
+       return [str(x) for x in iterable_of_ints]
+   f(range(1, 3))
+
+   # Mapping describes a dict-like object (with `__getitem__`) that we won't mutate,
+   # and MutableMapping one (with `__setitem__`) that we might.
+   def f(my_dict: Mapping[int, str]) -> List[int]:
+       return list(my_dict.keys())
+   f({3: 'yes', 4: 'no'})
+   def f(my_mapping: MutableMapping[int, str]) -> Set[str]:
+       my_mapping[5] = 'maybe'
+       return set(my_mapping.values())
+   f({3: 'yes', 4: 'no'})
+
+
+Classes
+*******
+
+.. code-block:: python
+
+   class MyClass:
+       # The __init__ method doesn't return anything, so it gets return
+       # type None just like any other method that doesn't return anything.
+       def __init__(self) -> None:
+           ...
+       # For instance methods, omit a type annotation for `self`.
+       def my_method(self, num: int, str1: str) -> str:
+           return num * str1
+
+
+
+   # User-defined classes are written with just their own names.
+   x = MyClass() # type: MyClass
+
+
+Other stuff
+***********
+
+.. code-block:: python
+
+   import sys
+   import re
+   # typing.Match describes regex matches from the re module.
+   from typing import Match, AnyStr, IO
+   x = re.match(r'[0-9]+', "15")  # type: Match[str]
+
+   # You can use AnyStr to indicate that any string type will work
+   # but not to mix types
+   def full_name(first: AnyStr, last: AnyStr) -> AnyStr:
+       return first+last
+   full_name('Jon','Doe')  # same str ok
+   full_name(b'Bill', b'Bit')  # same binary ok
+   full_name(b'Terry', 'Trouble')  # different str types, fails
+
+   # Use IO[] for functions that should accept or return any
+   # object that comes from an open() call. The IO[] does not
+   # distinguish between reading, writing or other modes.
+   def get_sys_IO(mode='w') -> IO[str]:
+       if mode == 'w':
+           return sys.stdout
+       elif mode == 'r':
+           return sys.stdin
+       else:
+           return sys.stdout
+
+   # forward references are useful if you want to reference a class before it is defined
+   
+   def f(foo: A) -> int:  # this will fail
+       ...
+   
+   class A:
+       ...
+       
+   # however, using the string 'A', it will pass as long as there is a class of that name later on
+   def f(foo: 'A') -> int:
+       ...
+
+   # TODO: add TypeVar and a simple generic function
+
+Variable Annotation in Python 3.6 with PEP 526
+**********************************************
+
+Python 3.6 brings new syntax for annotating variables with `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_.
+Mypy brings limited support for PEP 526 annotations.
+
+
+.. code-block:: python
+
+   # annotation is similar to arguments to functions
+   name: str = "Eric Idle"
+   
+   # class instances can be annotated as follows
+   mc: MyClass = MyClass()
+   
+   # tuple packing can be done as follows
+   tu: Tuple[str, ...] = ('a', 'b', 'c')
+   
+   # annotations are not checked at runtime
+   year: int = '1972'  # error in type checking, but works at runtime
+   
+   # these are all equivalent
+   hour = 24 # type: int
+   hour: int; hour = 24
+   hour: int = 24
+   
+   # you do not (!) need to initialize a variable to annotate it
+   a: int # ok for type checking and runtime
+   
+   # which is useful in conditional branches
+   child: bool
+   if age < 18:
+       child = True
+   else:
+       child = False
+   
+   # annotations for classes are for instance variables (those created in __init__ or __new__)
+   class Battery:
+       charge_percent: int = 100  # this is an instance variable with a default value
+       capacity: int  # an instance variable without a default
+       
+   # you can use the ClassVar annotation to make the variable a class variable instead of an instance variable.
+   class Car:
+       seats: ClassVar[int] = 4
+       passengers: ClassVar[List[str]]
+       
+   # You can also declare the type of an attribute in __init__
+   class Box:
+       def __init__(self) -> None:
+           self.items: List[str] = []
+   
+Please see :ref:`python-36` for more on mypy's compatibility with Python 3.6's new features.
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
new file mode 100644
index 0000000..dc778d3
--- /dev/null
+++ b/docs/source/class_basics.rst
@@ -0,0 +1,157 @@
+Class basics
+============
+
+Instance and class attributes
+*****************************
+
+The mypy type checker detects if you are trying to access a missing
+attribute, which is a very common programming error. For this to work
+correctly, instance and class attributes must be defined or
+initialized within the class. Mypy infers the types of attributes:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self, x: int) -> None:
+           self.x = x     # Attribute x of type int
+
+   a = A(1)
+   a.x = 2       # OK
+   a.y = 3       # Error: A has no attribute y
+
+This is a bit like each class having an implicitly defined
+``__slots__`` attribute. This is only enforced during type
+checking and not when your program is running.
+
+You can declare types of variables in the class body explicitly using
+a type comment:
+
+.. code-block:: python
+
+   class A:
+       x = None  # type: List[int]  # Declare attribute x of type List[int]
+
+   a = A()
+   a.x = [1]     # OK
+
+As in Python, a variable defined in the class body can be used as a class
+or an instance variable.
+
+Similarly, you can give explicit types to instance variables defined
+in a method:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self) -> None:
+           self.x = []  # type: List[int]
+
+       def f(self) -> None:
+           self.y = 0  # type: Any
+
+You can only define an instance variable within a method if you assign
+to it explicitly using ``self``:
+
+.. code-block:: python
+
+   class A:
+       def __init__(self) -> None:
+           self.y = 1   # Define y
+           a = self
+           a.x = 1      # Error: x not defined
+
+Overriding statically typed methods
+***********************************
+
+When overriding a statically typed method, mypy checks that the
+override has a compatible signature:
+
+.. code-block:: python
+
+   class A:
+       def f(self, x: int) -> None:
+           ...
+
+   class B(A):
+       def f(self, x: str) -> None:   # Error: type of x incompatible
+           ...
+
+   class C(A):
+       def f(self, x: int, y: int) -> None:  # Error: too many arguments
+           ...
+
+   class D(A):
+       def f(self, x: int) -> None:   # OK
+           ...
+
+.. note::
+
+   You can also vary return types **covariantly** in overriding. For
+   example, you could override the return type ``object`` with a subtype
+   such as ``int``.
+
+You can also override a statically typed method with a dynamically
+typed one. This allows dynamically typed code to override methods
+defined in library classes without worrying about their type
+signatures.
+
+There is no runtime enforcement that the method override returns a
+value that is compatible with the original return type, since
+annotations have no effect at runtime:
+
+.. code-block:: python
+
+   class A:
+       def inc(self, x: int) -> int:
+           return x + 1
+
+   class B(A):
+       def inc(self, x):       # Override, dynamically typed
+           return 'hello'
+
+   b = B()
+   print(b.inc(1))   # hello
+   a = b # type: A
+   print(a.inc(1))   # hello
+
+Abstract base classes and multiple inheritance
+**********************************************
+
+Mypy uses Python abstract base classes for protocol types. There are
+several built-in abstract base class types (for example,
+``Sequence``, ``Iterable`` and ``Iterator``). You can define abstract
+base classes using the ``abc.ABCMeta`` metaclass and the
+``abc.abstractmethod`` function decorator.
+
+.. code-block:: python
+
+   from abc import ABCMeta, abstractmethod
+   import typing
+
+   class A(metaclass=ABCMeta):
+       @abstractmethod
+       def foo(self, x: int) -> None: pass
+
+       @abstractmethod
+       def bar(self) -> str: pass
+
+   class B(A):
+       def foo(self, x: int) -> None: ...
+       def bar(self) -> str:
+           return 'x'
+
+   a = A() # Error: A is abstract
+   b = B() # OK
+
+Unlike most Python code, abstract base classes are likely to play a
+significant role in many complex mypy programs.
+
+A class can inherit any number of classes, both abstract and
+concrete. As with normal overrides, a dynamically typed method can
+implement a statically typed abstract method defined in an abstract
+base class.
+
+.. note::
+
+   There are also plans to support more Python-style "duck typing" in
+   the type system. The details are still open.
diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
new file mode 100644
index 0000000..3ada442
--- /dev/null
+++ b/docs/source/command_line.rst
@@ -0,0 +1,413 @@
+.. _command-line:
+
+The mypy command line
+=====================
+
+This section documents many of mypy's command line flags.  A quick
+summary of command line flags can always be printed using the ``-h``
+flag (or its long form ``--help``)::
+
+  $ mypy -h
+  usage: mypy [-h] [-v] [-V] [--python-version x.y] [--platform PLATFORM] [-2]
+              [--ignore-missing-imports]
+              [--follow-imports {normal,silent,skip,error}]
+              [--disallow-untyped-calls] [--disallow-untyped-defs]
+              [--check-untyped-defs] [--disallow-subclassing-any]
+              [--warn-incomplete-stub] [--warn-redundant-casts]
+              [--warn-no-return] [--warn-unused-ignores] [--show-error-context]
+              [--fast-parser] [-i] [--cache-dir DIR] [--strict-optional]
+              [--strict-optional-whitelist [GLOB [GLOB ...]]] [--strict]
+              [--junit-xml JUNIT_XML] [--pdb] [--show-traceback] [--stats]
+              [--inferstats] [--custom-typing MODULE]
+              [--custom-typeshed-dir DIR] [--scripts-are-modules]
+              [--config-file CONFIG_FILE] [--show-column-numbers]
+              [--find-occurrences CLASS.MEMBER] [--strict-boolean]
+              [--cobertura-xml-report DIR] [--html-report DIR]
+              [--linecount-report DIR] [--linecoverage-report DIR]
+              [--memory-xml-report DIR] [--old-html-report DIR]
+              [--txt-report DIR] [--xml-report DIR] [--xslt-html-report DIR]
+              [--xslt-txt-report DIR] [-m MODULE] [-c PROGRAM_TEXT] [-p PACKAGE]
+              [files [files ...]]
+
+  (etc., too long to show everything here)
+
+Specifying files and directories to be checked
+**********************************************
+
+You've already seen ``mypy program.py`` as a way to type check the
+file ``program.py``.  More generally you can pass any number of files
+and directories on the command line and they will all be type checked
+together.
+
+- Files ending in ``.py`` (and stub files ending in ``.pyi``) are
+  checked as Python modules.
+
+- Files not ending in ``.py`` or ``.pyi`` are assumed to be Python
+  scripts and checked as such.
+
+- Directories representing Python packages (i.e. containing a
+  ``__init__.py[i]`` file) are checked as Python packages; all
+  submodules and subpackages will be checked (subpackages must
+  themselves have a ``__init__.py[i]`` file).
+
+- Directories that don't represent Python packages (i.e. not directly
+  containing an ``__init__.py[i]`` file) are checked as follows:
+
+  - All ``*.py[i]`` files contained directly therein are checked as
+    toplevel Python modules;
+
+  - All packages contained directly therein (i.e. immediate
+    subdirectories with an ``__init__.py[i]`` file) are checked as
+    toplevel Python packages.
+
+One more thing about checking modules and packages: if the directory
+*containing* a module or package specified on the command line has an
+``__init__.py[i]`` file, mypy assigns these an absolute module name by
+crawling up the path until no ``__init__.py[i]`` file is found.  For
+example, suppose we run the command ``mypy foo/bar/baz.py`` where
+``foo/bar/__init__.py`` exists but ``foo/__init__.py`` does not.  Then
+the module name assumed is ``bar.baz`` and the directory ``foo`` is
+added to mypy's module search path.  On the other hand, if
+``foo/bar/__init__.py`` did not exist, ``foo/bar`` would be added to
+the module search path instead, and the module name assumed is just
+``baz``.
+
+If a script (a file not ending in ``.py[i]``) is processed, the module
+name assumed is always ``__main__`` (matching the behavior of the
+Python interpreter).
+
+Other ways of specifying code to be checked
+*******************************************
+
+The flag ``-m`` (long form: ``--module``) lets you specify a module
+name to be found using the default module search path.  The module
+name may contain dots.  For example::
+
+  $ mypy -m html.parser
+
+will type check the module ``html.parser`` (this happens to be a
+library stub).
+
+The flag ``-p`` (long form: ``--package``) is similar to ``-m`` but
+you give it a package name and it will type check all submodules and
+subpackages (recursively) of that package.  (If you pass a package
+name to ``-m`` it will just type check the package's ``__init__.py``
+and anything imported from there.)  For example::
+
+  $ mypy -p html
+
+will type check the entire ``html`` package (of library stubs).
+
+Finally the flag ``-c`` (long form: ``--command``) will take a string
+from the command line and type check it as a small program.  For
+example::
+
+  $ mypy -c 'x = [1, 2]; print(x())'
+
+will type check that little program (and complain that ``List[int]``
+is not callable).
+
+Reading a list of files from a file
+***********************************
+
+Finally, any command-line argument starting with ``@`` reads additional
+command-line arguments from the file following the ``@`` character.
+This is primarily useful if you have a file containing a list of files
+that you want to be type-checked: instead of using shell syntax like::
+
+  mypy $(cat file_of_files)
+
+you can use this instead::
+
+  mypy @file_of_files
+
+Such a file can also contain other flags, but a preferred way of
+reading flags (not files) from a file is to use a
+:ref:`configuration file <config-file>`.
+
+
+.. _finding-imports:
+
+How imports are found
+*********************
+
+When mypy encounters an ``import`` statement it tries to find the module
+on the file system, similar to the way Python finds it.
+However, there are some differences.
+
+First, mypy has its own search path.
+This is computed from the following items:
+
+- The ``MYPYPATH`` environment variable
+  (a colon-separated list of directories).
+- The directories containing the sources given on the command line
+  (see below).
+- The relevant directories of the
+  `typeshed <https://github.com/python/typeshed>`_ repo.
+
+For sources given on the command line, the path is adjusted by crawling
+up from the given file or package to the nearest directory that does not
+contain an ``__init__.py`` or ``__init__.pyi`` file.
+
+Second, mypy searches for stub files in addition to regular Python files
+and packages.
+The rules for searching a module ``foo`` are as follows:
+
+- The search looks in each of the directories in the search path
+  (see above) until a match is found.
+- If a package named ``foo`` is found (i.e. a directory
+  ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file)
+  that's a match.
+- If a stub file named ``foo.pyi`` is found, that's a match.
+- If a Python module named ``foo.py`` is found, that's a match.
+
+These matches are tried in order, so that if multiple matches are found
+in the same directory on the search path
+(e.g. a package and a Python file, or a stub file and a Python file)
+the first one in the above list wins.
+
+In particular, if a Python file and a stub file are both present in the
+same directory on the search path, only the stub file is used.
+(However, if the files are in different directories, the one found
+in the earlier directory is used.)
+
+NOTE: These rules are relevant to the following section too:
+the ``--follow-imports`` flag described below is applied *after* the
+above algorithm has determined which package, stub or module to use.
+
+.. _follow-imports:
+
+Following imports or not?
+*************************
+
+When you're first attacking a large existing codebase with mypy, you
+may only want to check selected files.  For example, you may only want
+to check those files to which you have already added annotations.
+This is easily accomplished using a shell pipeline like this::
+
+  mypy $(find . -name \*.py | xargs grep -l '# type:')
+
+(While there are many improvements possible to make this example more
+robust, this is not the place for a tutorial in shell programming.)
+
+However, by default mypy doggedly tries to :ref:`follow imports
+<finding-imports>`.  This may cause several types of problems that you
+may want to silence during your initial conquest:
+
+- Your code may import library modules for which no stub files exist
+  yet.  This can cause a lot of errors like the following::
+
+    main.py:1: error: No library stub file for standard library module 'antigravity'
+    main.py:2: error: No library stub file for module 'flask'
+    main.py:3: error: Cannot find module named 'sir_not_appearing_in_this_film'
+
+  If you see only a few of these you may be able to silence them by
+  putting ``# type: ignore`` on the respective ``import`` statements,
+  but it's usually easier to silence all such errors by using
+  :ref:`--ignore-missing-imports <ignore-missing-imports>`.
+
+- Your project's directory structure may hinder mypy in finding
+  certain modules that are part of your project, e.g. modules hidden
+  away in a subdirectory that's not a package.  You can usually deal
+  with this by setting the ``MYPYPATH`` variable (see
+  :ref:`finding-imports`).
+
+- When following imports mypy may find a module that's part of your
+  project but which you haven't annotated yet, mypy may report errors
+  for the top level code in that module (where the top level includes
+  class bodies and function/method default values).  Here the
+  ``--follow-imports`` flag comes in handy.
+
+The ``--follow-imports`` flag takes a mandatory string value that can
+take one of four values.  It only applies to modules for which a
+``.py`` file is found (but no corresponding ``.pyi`` stub file) and
+that are not given on the command line.  Passing a package or
+directory on the command line implies all modules in that package or
+directory.  The four possible values are:
+
+- ``normal`` (the default) follow imports normally and type check all
+  top level code (as well as the bodies of all functions and methods
+  with at least one type annotation in the signature).
+
+- ``silent`` follow imports normally and even "type check" them
+  normally, but *suppress any error messages*. This is typically the
+  best option for a new codebase.
+
+- ``skip`` *don't* follow imports, silently replacing the module (and
+  everything imported *from* it) with an object of type ``Any``.
+  (This option used to be known as ``--silent-imports`` and while it
+  is very powerful it can also cause hard-to-debug errors, hence the
+  recommendation of using ``silent`` instead.)
+
+- ``error`` the same behavior as ``skip`` but not quite as silent --
+  it flags the import as an error, like this::
+
+    main.py:1: note: Import of 'submodule' ignored
+    main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+
+
+Additional command line flags
+*****************************
+
+Here are some more useful flags:
+
+.. _ignore-missing-imports:
+
+- ``--ignore-missing-imports`` suppresses error messages about imports
+  that cannot be resolved (see :ref:`follow-imports` for some examples).
+
+- ``--strict-optional`` enables experimental strict checking of ``Optional[...]``
+  types and ``None`` values. Without this option, mypy doesn't generally check the
+  use of ``None`` values -- they are valid everywhere. See :ref:`strict_optional` for
+  more about this feature.
+
+- ``--strict-optional-whitelist`` attempts to suppress strict Optional-related
+  errors in non-whitelisted files.  Takes an arbitrary number of globs as the
+  whitelist.  This option is intended to be used to incrementally roll out
+  ``--strict-optional`` to a large codebase that already has mypy annotations.
+  However, this flag comes with some significant caveats.  It does not suppress
+  all errors caused by turning on ``--strict-optional``, only most of them, so
+  there may still be a bit of upfront work to be done before it can be used in
+  CI.  It will also suppress some errors that would be caught in a
+  non-strict-Optional run.  Therefore, when using this flag, you should also
+  re-check your code without ``--strict-optional`` to ensure new type errors
+  are not introduced.
+
+- ``--disallow-untyped-defs`` reports an error whenever it encounters
+  a function definition without type annotations.
+
+- ``--check-untyped-defs`` is less severe than the previous option --
+  it type checks the body of every function, regardless of whether it
+  has type annotations.  (By default the bodies of functions without
+  annotations are not type checked.)  It will assume all arguments
+  have type ``Any`` and always infer ``Any`` as the return type.
+
+- ``--disallow-untyped-calls`` reports an error whenever a function
+  with type annotations calls a function defined without annotations.
+
+.. _disallow-subclassing-any:
+
+- ``--disallow-subclassing-any`` reports an error whenever a class
+  subclasses a value of type ``Any``.  This may occur when the base
+  class is imported from a module that doesn't exist (when using
+  :ref:`--ignore-missing-imports <ignore-missing-imports>`) or is
+  ignored due to :ref:`--follow-imports=skip <follow-imports>` or a
+  ``# type: ignore`` comment on the ``import`` statement.  Since the
+  module is silenced, the imported class is given a type of ``Any``.
+  By default mypy will assume that the subclass correctly inherited
+  the base class even though that may not actually be the case.  This
+  flag makes mypy raise an error instead.
+
+- ``--incremental`` is an experimental option that enables incremental
+  type checking. When enabled, mypy caches results from previous runs
+  to speed up type checking. Incremental mode can help when most parts
+  of your program haven't changed since the previous mypy run.
+
+- ``--fast-parser`` enables an experimental parser implemented in C that
+  is faster than the default parser and supports multi-line comment
+  function annotations (see :ref:`multi_line_annotation` for the details).
+
+- ``--python-version X.Y`` will make mypy typecheck your code as if it were
+  run under Python version X.Y. Without this option, mypy will default to using
+  whatever version of Python is running mypy. Note that the ``-2`` and
+  ``--py2`` flags are aliases for ``--python-version 2.7``. See
+  :ref:`version_and_platform_checks` for more about this feature.
+
+- ``--platform PLATFORM`` will make mypy typecheck your code as if it were
+  run under the given operating system. Without this option, mypy will
+  default to using whatever operating system you are currently using. See
+  :ref:`version_and_platform_checks` for more about this feature.
+
+- ``--show-column-numbers`` will add column offsets to error messages,
+  for example, the following indicates an error in line 12, column 9
+  (note that column offsets are 0-based):
+
+  .. code-block:: python
+
+     main.py:12:9: error: Unsupported operand types for / ("int" and "str")
+
+- ``--scripts-are-modules`` will give command line arguments that
+  appear to be scripts (i.e. files whose name does not end in ``.py``)
+  a module name derived from the script name rather than the fixed
+  name ``__main__``.  This allows checking more than one script in a
+  single mypy invocation.  (The default ``__main__`` is technically
+  more correct, but if you have many scripts that import a large
+  package, the behavior enabled by this flag is often more
+  convenient.)
+
+- ``--custom-typeshed-dir DIR`` specifies the directory where mypy looks for
+  typeshed stubs, instead of the typeshed that ships with mypy.  This is
+  primarily intended to make it easier to test typeshed changes before
+  submitting them upstream, but also allows you to use a forked version of
+  typeshed.
+
+.. _config-file-flag:
+
+- ``--config-file CONFIG_FILE`` causes configuration settings to be
+  read from the given file.  By default settings are read from ``mypy.ini``
+  in the current directory.  Settings override mypy's built-in defaults
+  and command line flags can override settings.  See :ref:`config-file`
+  for the syntax of configuration files.
+
+- ``--junit-xml JUNIT_XML`` will make mypy generate a JUnit XML test
+  result document with type checking results. This can make it easier
+  to integrate mypy with continuous integration (CI) tools.
+
+- ``--find-occurrences CLASS.MEMBER`` will make mypy print out all
+  usages of a class member based on static type information. This
+  feature is experimental.
+
+- ``--cobertura-xml-report DIR`` causes mypy to generate a Cobertura
+  XML type checking coverage report.
+
+- ``--warn-no-return`` causes mypy to generate errors for missing return
+  statements on some execution paths. Mypy doesn't generate these errors
+  for functions with ``None`` or ``Any`` return types. Mypy
+  also currently ignores functions with an empty body or a body that is
+  just ellipsis (``...``), since these can be valid as abstract methods.
+
+- ``--strict-boolean`` will make using non-boolean expressions in conditions
+  an error. This means ``if x`` and ``while x`` are disallowed when ``x`` has any
+  type other than ``bool``. Instead use explicit checks like ``if x > 0`` or
+  ``while x is not None``.
+
+- ``--strict`` mode enables all optional error checking flags.  You can see the
+  list of flags enabled by strict mode in the full ``mypy -h`` output.
+
+For the remaining flags you can read the full ``mypy -h`` output.
+
+.. note::
+
+   Command line flags are liable to change between releases.
+
+.. _integrating-mypy:
+
+Integrating mypy into another Python application
+************************************************
+
+It is possible to integrate mypy into another Python 3 application by
+importing ``mypy.api`` and calling the ``run`` function with a parameter of type ``List[str]``, containing
+what normally would have been the command line arguments to mypy.
+
+Function ``run`` returns a ``Tuple[str, str, int]``, namely
+``(<normal_report>, <error_report>, <exit_status>)``, in which ``<normal_report>``
+is what mypy normally writes to ``sys.stdout``, ``<error_report>`` is what mypy
+normally writes to ``sys.stderr`` and ``exit_status`` is the exit status mypy normally
+returns to the operating system.
+
+A trivial example of using the api is the following::
+
+    import sys
+    from mypy import api
+
+    result = api.run(sys.argv[1:])
+
+    if result[0]:
+        print('\nType checking report:\n')
+        print(result[0])  # stdout
+
+    if result[1]:
+        print('\nError report:\n')
+        print(result[1])  # stderr
+
+    print('\nExit status:', result[2])
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst
new file mode 100644
index 0000000..3477d90
--- /dev/null
+++ b/docs/source/common_issues.rst
@@ -0,0 +1,386 @@
+.. _common_issues:
+
+Common issues
+=============
+
+This section has examples of cases when you need to update your code
+to use static typing, and ideas for working around issues if mypy
+doesn't work as expected. Statically typed code is often identical to
+normal Python code, but sometimes you need to do things slightly
+differently.
+
+Can't install mypy using pip
+----------------------------
+
+If installation fails, you've probably hit one of these issues:
+
+* Mypy needs Python 3.3 or later to run.
+* You may have to run pip like this:
+  ``python3 -m pip install mypy``.
+
+.. _annotations_needed:
+
+No errors reported for obviously wrong code
+-------------------------------------------
+
+There are several common reasons why obviously wrong code is not
+flagged as an error.
+
+- **The function containing the error is not annotated.** Functions that
+  do not have any annotations (neither for any argument nor for the
+  return type) are not type-checked, and even the most blatant type
+  errors (e.g. ``2 + 'a'``) pass silently.  The solution is to add
+  annotations.
+
+  Example:
+
+  .. code-block:: python
+
+      def foo(a):
+          return '(' + a.split() + ')'  # No error!
+
+  This gives no error even though ``a.split()`` is "obviously" a list
+  (the author probably meant ``a.strip()``).  The error is reported
+  once you add annotations:
+
+  .. code-block:: python
+
+      def foo(a: str) -> str:
+          return '(' + a.split() + ')'
+      # error: Unsupported operand types for + ("str" and List[str])
+
+  If you don't know what types to add, you can use ``Any``, but beware:
+
+- **One of the values involved has type ``Any``.** Extending the above
+  example, if we were to leave out the annotation for ``a``, we'd get
+  no error:
+
+  .. code-block:: python
+
+      def foo(a) -> str:
+          return '(' + a.split() + ')'  # No error!
+
+  The reason is that if the type of ``a`` is unknown, the type of
+  ``a.split()`` is also unknown, so it is inferred as having type
+  ``Any``, and it is no error to add a string to an ``Any``.
+
+  If you're having trouble debugging such situations,
+  :ref:`reveal_type() <reveal-type>` might come in handy.
+
+  Note that sometimes library stubs have imprecise type information,
+  e.g. the ``pow()`` builtin returns ``Any`` (see `typeshed issue 285
+  <https://github.com/python/typeshed/issues/285>`_ for the reason).
+
+- **Some imports may be silently ignored**.  Another source of
+  unexpected ``Any`` values are the :ref:`"--ignore-missing-imports"
+  <ignore-missing-imports>` and :ref:`"--follow-imports=skip"
+  <follow-imports>` flags.  When you use ``--ignore-missing-imports``,
+  any imported module that cannot be found is silently replaced with
+  ``Any``.  When using ``--follow-imports=skip`` the same is true for
+  modules for which a ``.py`` file is found but that are not specified
+  on the command line.  (If a ``.pyi`` stub is found it is always
+  processed normally, regardless of the value of
+  ``--follow-imports``.)  To help debug the former situation (no
+  module found at all) leave out ``--ignore-missing-imports``; to get
+  clarity about the latter use ``--follow-imports=error``.  You can
+  read up about these and other useful flags in :ref:`command-line`.
+
+.. _silencing_checker:
+
+Spurious errors and locally silencing the checker
+-------------------------------------------------
+
+You can use a ``# type: ignore`` comment to silence the type checker
+on a particular line. For example, let's say our code is using
+the C extension module ``frobnicate``, and there's no stub available.
+Mypy will complain about this, as it has no information about the
+module:
+
+.. code-block:: python
+
+    import frobnicate  # Error: No module "frobnicate"
+    frobnicate.start()
+
+You can add a ``# type: ignore`` comment to tell mypy to ignore this
+error:
+
+.. code-block:: python
+
+    import frobnicate  # type: ignore
+    frobnicate.start()  # Okay!
+
+The second line is now fine, since the ignore comment causes the name
+``frobnicate`` to get an implicit ``Any`` type.
+
+.. note::
+
+    The ``# type: ignore`` comment will only assign the implicit ``Any``
+    type if mypy cannot find information about that particular module. So,
+    if we did have a stub available for ``frobnicate`` then mypy would
+    ignore the ``# type: ignore`` comment and typecheck the stub as usual.
+
+Types of empty collections
+--------------------------
+
+You often need to specify the type when you assign an empty list or
+dict to a new variable, as mentioned earlier:
+
+.. code-block:: python
+
+   a = []  # type: List[int]
+
+Without the annotation mypy can't always figure out the
+precise type of ``a``.
+
+You can use a simple empty list literal in a dynamically typed function (as the
+type of ``a`` would be implicitly ``Any`` and need not be inferred), if the
+type of the variable has been declared or inferred before, or if you perform a simple
+modification operation in the same scope (such as ``append`` for a list):
+
+.. code-block:: python
+
+   a = []  # Okay because followed by append, inferred type List[int]
+   for i in range(n):
+       a.append(i * i)
+
+However, in more complex cases an explicit type annotation can be
+required (mypy will tell you this). Often the annotation can
+make your code easier to understand, so it doesn't only help mypy but
+everybody who is reading the code!
+
+Redefinitions with incompatible types
+-------------------------------------
+
+Each name within a function only has a single 'declared' type. You can
+reuse for loop indices etc., but if you want to use a variable with
+multiple types within a single function, you may need to declare it
+with the ``Any`` type.
+
+.. code-block:: python
+
+   def f() -> None:
+       n = 1
+       ...
+       n = 'x'        # Type error: n has type int
+
+.. note::
+
+   This limitation could be lifted in a future mypy
+   release.
+
+Note that you can redefine a variable with a more *precise* or a more
+concrete type. For example, you can redefine a sequence (which does
+not support ``sort()``) as a list and sort it in-place:
+
+.. code-block:: python
+
+    def f(x: Sequence[int]) -> None:
+        # Type of x is Sequence[int] here; we don't know the concrete type.
+        x = list(x)
+        # Type of x is List[int] here.
+        x.sort()  # Okay!
+
+Declaring a supertype as variable type
+--------------------------------------
+
+Sometimes the inferred type is a subtype (subclass) of the desired
+type. The type inference uses the first assignment to infer the type
+of a name (assume here that ``Shape`` is the base class of both
+``Circle`` and ``Triangle``):
+
+.. code-block:: python
+
+   shape = Circle()    # Infer shape to be Circle
+   ...
+   shape = Triangle()  # Type error: Triangle is not a Circle
+
+You can just give an explicit type for the variable in cases such as the
+above example:
+
+.. code-block:: python
+
+   shape = Circle() # type: Shape   # The variable shape can be any Shape,
+                                    # not just Circle
+   ...
+   shape = Triangle()               # OK
+
+Complex type tests
+------------------
+
+Mypy can usually infer the types correctly when using ``isinstance()``
+type tests, but for other kinds of checks you may need to add an
+explicit type cast:
+
+.. code-block:: python
+
+   def f(o: object) -> None:
+       if type(o) is int:
+           o = cast(int, o)
+           g(o + 1)    # This would be an error without the cast
+           ...
+       else:
+           ...
+
+.. note::
+
+    Note that the ``object`` type used in the above example is similar
+    to ``Object`` in Java: it only supports operations defined for *all*
+    objects, such as equality and ``isinstance()``. The type ``Any``,
+    in contrast, supports all operations, even if they may fail at
+    runtime. The cast above would have been unnecessary if the type of
+    ``o`` was ``Any``.
+
+Mypy can't infer the type of ``o`` after the ``type()`` check
+because it only knows about ``isinstance()`` (and the latter is better
+style anyway).  We can write the above code without a cast by using
+``isinstance()``:
+
+.. code-block:: python
+
+   def f(o: object) -> None:
+       if isinstance(o, int):  # Mypy understands isinstance checks
+           g(o + 1)        # Okay; type of o is inferred as int here
+           ...
+
+Type inference in mypy is designed to work well in common cases, to be
+predictable and to let the type checker give useful error
+messages. More powerful type inference strategies often have complex
+and difficult-to-predict failure modes and could result in very
+confusing error messages. The tradeoff is that you as a programmer
+sometimes have to give the type checker a little help.
+
+.. _version_and_platform_checks:
+
+Python version and system platform checks
+-----------------------------------------
+
+Mypy supports the ability to perform Python version checks and platform
+checks (e.g. Windows vs Posix), ignoring code paths that won't be run on
+the targeted Python version or platform. This allows you to more effectively
+typecheck code that supports multiple versions of Python or multiple operating
+systems.
+
+More specifically, mypy will understand the use of ``sys.version_info`` and
+``sys.platform`` checks within ``if/elif/else`` statements. For example:
+
+.. code-block:: python
+
+   import sys
+
+   # Distinguishing between different versions of Python:
+   if sys.version_info >= (3, 5):
+       # Python 3.5+ specific definitions and imports
+   elif sys.version_info[0] >= 3:
+       # Python 3 specific definitions and imports
+   else:
+       # Python 2 specific definitions and imports
+
+   # Distinguishing between different operating systems:
+   if sys.platform.startswith("linux"):
+       # Linux-specific code
+   elif sys.platform == "darwin":
+       # Mac-specific code
+   elif sys.platform == "win32":
+       # Windows-specific code
+   else:
+       # Other systems
+
+.. note::
+
+   Mypy currently does not support more complex checks, and does not assign
+   any special meaning when assigning a ``sys.version_info`` or ``sys.platform``
+   check to a variable. This may change in future versions of mypy.
+
+By default, mypy will use your current version of Python and your current
+operating system as default values for ``sys.version_info`` and
+``sys.platform``.
+
+To target a different Python version, use the ``--python-version X.Y`` flag.
+For example, to verify your code typechecks if it were run using Python 2, pass
+in ``--python-version 2.7`` from the command line. Note that you do not need
+to have Python 2.7 installed to perform this check.
+
+To target a different operating system, use the ``--platform PLATFORM`` flag.
+For example, to verify your code typechecks if it were run in Windows, pass
+in ``--platform win32``. See the documentation for
+`sys.platform <https://docs.python.org/3/library/sys.html#sys.platform>`_
+for examples of valid platform parameters.
+
+.. _reveal-type:
+
+Displaying the type of an expression
+------------------------------------
+
+You can use ``reveal_type(expr)`` to ask mypy to display the inferred
+static type of an expression. This can be useful when you don't quite
+understand how mypy handles a particular piece of code. Example:
+
+.. code-block:: python
+
+   reveal_type((1, 'hello'))  # Revealed type is 'Tuple[builtins.int, builtins.str]'
+
+.. note::
+
+   ``reveal_type`` is only understood by mypy and doesn't exist
+   in Python. If you try to run your program, you'll have to remove
+   any ``reveal_type`` calls before you can run your code.
+   ``reveal_type`` is always available and you don't need to import it.
+
+.. _import-cycles:
+
+Import cycles
+-------------
+
+An import cycle occurs where module A imports module B and module B
+imports module A (perhaps indirectly, e.g. ``A -> B -> C -> A``).
+Sometimes in order to add type annotations you have to add extra
+imports to a module and those imports cause cycles that didn't exist
+before.  If those cycles become a problem when running your program,
+there's a trick: if the import is only needed for type annotations in
+forward references (string literals) or comments, you can write the
+imports inside ``if TYPE_CHECKING:`` so that they are not executed at runtime.
+Example:
+
+File ``foo.py``:
+
+.. code-block:: python
+
+   from typing import List, TYPE_CHECKING
+
+   if TYPE_CHECKING:
+       import bar
+
+   def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
+       return [arg]
+
+File ``bar.py``:
+
+.. code-block:: python
+
+   from typing import List
+   from foo import listify
+
+   class BarClass:
+       def listifyme(self) -> 'List[BarClass]':
+           return listify(self)
+
+.. note::
+
+   The ``TYPE_CHECKING`` constant defined by the ``typing`` module
+   is ``False`` at runtime but ``True`` while type checking.
+
+Python 3.5.1 doesn't have ``typing.TYPE_CHECKING``. An alternative is
+to define a constant named ``MYPY`` that has the value ``False``
+at runtime. Mypy considers it to be ``True`` when type checking.
+Here's the above example modified to use ``MYPY``:
+
+.. code-block:: python
+
+   from typing import List
+
+   MYPY = False
+   if MYPY:
+       import bar
+
+   def listify(arg: 'bar.BarClass') -> 'List[bar.BarClass]':
+       return [arg]
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..cf64842
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,268 @@
+# -*- coding: utf-8 -*-
+#
+# Mypy documentation build configuration file, created by
+# sphinx-quickstart on Sun Sep 14 19:50:35 2014.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('../..'))
+
+from mypy.version import __version__ as mypy_version
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Mypy'
+copyright = u'2016, Jukka Lehtosalo'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = mypy_version.split('-')[0]
+# The full version, including alpha/beta/rc tags.
+release = mypy_version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+try:
+    import sphinx_rtd_theme
+except:
+    html_theme = 'default'
+else:
+    html_theme = 'sphinx_rtd_theme'
+    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Mypydoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  ('index', 'Mypy.tex', u'Mypy Documentation',
+   u'Jukka', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'mypy', u'Mypy Documentation',
+     [u'Jukka Lehtosalo'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'Mypy', u'Mypy Documentation',
+   u'Jukka', 'Mypy', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+rst_prolog = '.. |...| unicode:: U+2026   .. ellipsis\n'
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
new file mode 100644
index 0000000..001f6c9
--- /dev/null
+++ b/docs/source/config_file.rst
@@ -0,0 +1,184 @@
+.. _config-file:
+
+The mypy configuration file
+===========================
+
+Mypy supports reading configuration settings from a file.  By default
+it uses the file ``mypy.ini`` in the current directory; the
+``--config-file`` command-line flag can be used to read a different
+file instead (see :ref:`--config-file <config-file-flag>`).
+
+Most flags correspond closely to :ref:`command-line flags
+<command-line>` but there are some differences in flag names and some
+flags may take a different value based on the module being processed.
+
+The configuration file format is the usual
+`ini file <https://docs.python.org/3.6/library/configparser.html>`_
+format.  It should contain section names in square brackets and flag
+settings of the form `NAME = VALUE`.  Comments start with ``#``
+characters.
+
+- A section named ``[mypy]`` must be present.  This specifies
+  the global flags.
+
+- Additional sections named ``[mypy-PATTERN1,PATTERN2,...]`` may be
+  present, where ``PATTERN1``, ``PATTERN2`` etc. are `fnmatch patterns
+  <https://docs.python.org/3.6/library/fnmatch.html>`_
+  separated by commas.  These sections specify additional flags that
+  only apply to *modules* whose name matches at least one of the patterns.
+
+Global flags
+************
+
+The following global flags may only be set in the global section
+(``[mypy]``).
+
+- ``python_version`` (string) specifies the Python version used to
+  parse and check the target program.  The format is ``DIGIT.DIGIT``
+  for example ``2.7``.  The default is the version of the Python
+  interpreter used to run mypy.
+
+- ``platform`` (string) specifies the OS platform for the target
+  program, for example ``darwin`` or ``win32`` (meaning OS X or
+  Windows, respectively).  The default is the current platform as
+  revealed by Python's ``sys.platform`` variable.
+
+- ``custom_typing_module`` (string) specifies the name of an
+  alternative module which is to be considered equivalent to the
+  ``typing`` module.
+
+- ``custom_typeshed_dir`` (string) specifies the name of an
+  alternative directory which is used to look for stubs instead of the
+  default ``typeshed`` directory.
+
+- ``mypy_path`` (string) specifies the paths to use, after trying the paths
+  from ``MYPYPATH`` environment variable.  Useful if you'd like to keep stubs
+  in your repo, along with the config file.
+
+- ``warn_incomplete_stub`` (Boolean, default False) warns for missing
+  type annotation in typeshed.  This is only relevant in combination
+  with ``check_untyped_defs``.
+
+- ``warn_redundant_casts`` (Boolean, default False) warns about
+  casting an expression to its inferred type.
+
+- ``warn_unused_ignores`` (Boolean, default False) warns about
+  unneeded ``# type: ignore`` comments.
+
+- ``strict_optional`` (Boolean, default False) enables experimental
+  strict Optional checks.
+
+- ``scripts_are_modules`` (Boolean, default False) makes script ``x``
+  become module ``x`` instead of ``__main__``.  This is useful when
+  checking multiple scripts in a single run.
+
+- ``verbosity`` (integer, default 0) controls how much debug output
+  will be generated.  Higher numbers are more verbose.
+
+- ``pdb`` (Boolean, default False) invokes pdb on fatal error.
+
+- ``show_traceback`` (Boolean, default False) shows traceback on fatal
+  error.
+
+- ``dump_type_stats`` (Boolean, default False) dumps stats about type
+  definitions.
+
+- ``dump_inference_stats`` (Boolean, default False) dumps stats about
+  type inference.
+
+- ``fast_parser`` (Boolean, default False) enables the experimental
+  fast parser.
+
+- ``incremental`` (Boolean, default False) enables the experimental
+  module cache.
+
+- ``cache_dir`` (string, default ``.mypy_cache``) stores module cache
+  info in the given folder in incremental mode.
+
+- ``show_error_context`` (Boolean, default False) shows
+  context notes before errors.
+
+- ``show_column_numbers`` (Boolean, default False) shows column numbers in
+  error messages.
+
+
+.. _per-module-flags:
+
+Per-module flags
+****************
+
+The following flags may vary per module.  They may also be specified in
+the global section; the global section provides defaults which are
+overridden by the pattern sections matching the module name.
+
+.. note::
+
+   If multiple pattern sections match a module they are processed in
+   unspecified order.
+
+- ``follow_imports`` (string, default ``normal``) directs what to do
+  with imports when the imported module is found as a ``.py`` file and
+  not part of the files, modules and packages on the command line.
+  The four possible values are ``normal``, ``silent``, ``skip`` and
+  ``error``.  For explanations see the discussion for the
+  :ref:`--follow-imports <follow-imports>` command line flag.  Note
+  that if pattern matching is used, the pattern should match the name
+  of the _imported_ module, not the module containing the import
+  statement.
+
+- ``ignore_missing_imports`` (Boolean, default False) suppress error
+  messages about imports that cannot be resolved.  Note that if
+  pattern matching is used, the pattern should match the name of the
+  _imported_ module, not the module containing the import statement.
+
+- ``silent_imports`` (Boolean, deprecated) equivalent to
+  ``follow_imports=skip`` plus ``ignore_missing_imports=True``.
+
+- ``almost_silent`` (Boolean, deprecated) equivalent to
+  ``follow_imports=skip``.
+
+- ``disallow_untyped_calls`` (Boolean, default False) disallows
+  calling functions without type annotations from functions with type
+  annotations.
+
+- ``disallow_untyped_defs`` (Boolean, default False) disallows
+  defining functions without type annotations or with incomplete type
+  annotations.
+
+- ``check_untyped_defs`` (Boolean, default False) type-checks the
+  interior of functions without type annotations.
+
+- ``debug_cache`` (Boolean, default False) writes the incremental
+  cache JSON files using a more readable, but slower format.
+
+- ``show_none_errors`` (Boolean, default True) shows errors related
+  to strict ``None`` checking, if the global ``strict_optional`` flag
+  is enabled.
+
+- ``ignore_errors`` (Boolean, default False) ignores all non-fatal
+  errors.
+
+- ``warn_no_return`` (Boolean, default False) shows errors for
+  missing return statements on some execution paths.
+
+Example
+*******
+
+You might put this in your ``mypy.ini`` file at the root of your repo:
+
+.. code-block:: text
+
+    [mypy]
+    python_version = 2.7
+    [mypy-foo.*]
+    disallow_untyped_defs = True
+
+This automatically sets ``--python-version 2.7`` (a.k.a. ``--py2``)
+for all mypy runs in this tree, and also selectively turns on the
+``--disallow-untyped-defs`` flag for all modules in the ``foo``
+package.  This issues an error for function definitions without
+type annotations in that subdirectory only.
+
+.. note::
+
+   Configuration flags are liable to change between releases.
diff --git a/docs/source/duck_type_compatibility.rst b/docs/source/duck_type_compatibility.rst
new file mode 100644
index 0000000..a128b69
--- /dev/null
+++ b/docs/source/duck_type_compatibility.rst
@@ -0,0 +1,40 @@
+Duck type compatibility
+-----------------------
+
+In Python, certain types are compatible even though they aren't subclasses of
+each other. For example, ``int`` objects are valid whenever ``float`` objects
+are expected. Mypy supports this idiom via *duck type compatibility*. As of
+now, this is only supported for a small set of built-in types:
+
+* ``int`` is duck type compatible with ``float`` and ``complex``.
+* ``float`` is duck type compatible with ``complex``.
+* In Python 2, ``str`` is duck type compatible with ``unicode``.
+
+.. note::
+
+   Mypy support for Python 2 is still work in progress.
+
+For example, mypy considers an ``int`` object to be valid whenever a
+``float`` object is expected.  Thus code like this is nice and clean
+and also behaves as expected:
+
+.. code-block:: python
+
+   def degrees_to_radians(degrees: float) -> float:
+       return math.pi * degrees / 180
+
+   n = 90  # Inferred type 'int'
+   print(degrees_to_radians(n))   # Okay!
+
+.. note::
+
+   Note that in Python 2 a ``str`` object with non-ASCII characters is
+   often *not valid* when a unicode string is expected. The mypy type
+   system does not consider a string with non-ASCII values as a
+   separate type so some programs with this kind of error will
+   silently pass type checking. In Python 3 ``str`` and ``bytes`` are
+   separate, unrelated types and this kind of error is easy to
+   detect. This is a good reason for preferring Python 3 over Python 2!
+
+   See :ref:`text-and-anystr` for details on how to enforce that a
+   value must be a unicode string in a cross-compatible way.
diff --git a/docs/source/dynamic_typing.rst b/docs/source/dynamic_typing.rst
new file mode 100644
index 0000000..ba76442
--- /dev/null
+++ b/docs/source/dynamic_typing.rst
@@ -0,0 +1,86 @@
+.. _dynamic_typing:
+
+
+Dynamically typed code
+======================
+
+As mentioned earlier, bodies of functions that don't have any
+explicit types in their function annotation are dynamically typed
+(operations are checked at runtime). Code outside functions is
+statically typed by default, and types of variables are inferred. This
+usually does the right thing, but you can also make any variable
+dynamically typed by defining it explicitly with the type ``Any``:
+
+.. code-block:: python
+
+   from typing import Any
+
+   s = 1                 # Statically typed (type int)
+   d = 1  # type: Any    # Dynamically typed (type Any)
+   s = 'x'               # Type check error
+   d = 'x'               # OK
+
+Operations on Any values
+------------------------
+
+You can do anything using a value with type ``Any``, and the type
+checker does not complain:
+
+.. code-block:: python
+
+    def f(x: Any) -> int:
+        # All of these are valid!
+        x.foobar(1, y=2)
+        print(x[3] + 'f')
+        if x:
+            x.z = x(2)
+        open(x).read()
+        return x
+
+Values derived from an ``Any`` value also often have the type ``Any``
+implicitly, as mypy can't infer a more precise result type. For
+example, if you get the attribute of an ``Any`` value or call a
+``Any`` value the result is ``Any``:
+
+.. code-block:: python
+
+    def f(x: Any) -> None:
+        y = x.foo()  # y has type Any
+        y.bar()      # Okay as well!
+
+``Any`` types may propagate through your program, making type checking
+less effective, unless you are careful.
+
+Any vs. object
+--------------
+
+The type ``object`` is another type that can have an instance of arbitrary
+type as a value. Unlike ``Any``, ``object`` is an ordinary static type (it
+is similar to ``Object`` in Java), and only operations valid for *all*
+types are accepted for ``object`` values. These are all valid:
+
+.. code-block:: python
+
+    def f(o: object) -> None:
+        if o:
+            print(o)
+        print(isinstance(o, int))
+        o = 2
+        o = 'foo'
+
+These are, however, flagged as errors, since not all objects support these
+operations:
+
+.. code-block:: python
+
+    def f(o: object) -> None:
+        o.foo()       # Error!
+        o + 2         # Error!
+        open(o)       # Error!
+        n = 1  # type: int
+        n = o         # Error!
+
+You can use ``cast()`` (see chapter :ref:`casts`) or ``isinstance`` to
+go from a general type such as ``object`` to a more specific
+type (subtype) such as ``int``.  ``cast()`` is not needed with
+dynamically typed values (values with type ``Any``).
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
new file mode 100644
index 0000000..9fd73b4
--- /dev/null
+++ b/docs/source/faq.rst
@@ -0,0 +1,270 @@
+Frequently Asked Questions
+==========================
+
+Why have both dynamic and static typing?
+****************************************
+
+Dynamic typing can be flexible, powerful, convenient and easy. But
+it's not always the best approach; there are good reasons why many
+developers choose to use statically typed languages.
+
+Here are some potential benefits of mypy-style static typing:
+
+- Static typing can make programs easier to understand and
+  maintain. Type declarations can serve as machine-checked
+  documentation. This is important as code is typically read much more
+  often than modified, and this is especially important for large and
+  complex programs.
+
+- Static typing can help you find bugs earlier and with less testing
+  and debugging. Especially in large and complex projects this can be
+  a major time-saver.
+
+- Static typing can help you find difficult-to-find bugs before your
+  code goes into production. This can improve reliability and reduce
+  the number of security issues.
+
+- Static typing makes it practical to build very useful development
+  tools that can improve programming productivity or software quality,
+  including IDEs with precise and reliable code completion, static
+  analysis tools, etc.
+
+- You can get the benefits of both dynamic and static typing in a
+  single language. Dynamic typing can be perfect for a small project
+  or for writing the UI of your program, for example. As your program
+  grows, you can adapt tricky application logic to static typing to
+  help maintenance.
+
+See also the `front page <http://www.mypy-lang.org>`_ of the mypy web
+site.
+
+Would my project benefit from static typing?
+********************************************
+
+For many projects dynamic typing is perfectly fine (we think that
+Python is a great language). But sometimes your projects demand bigger
+guns, and that's when mypy may come in handy.
+
+If some of these ring true for your projects, mypy (and static typing)
+may be useful:
+
+- Your project is large or complex.
+
+- Your codebase must be maintained for a long time.
+
+- Multiple developers are working on the same code.
+
+- Running tests takes a lot of time or work (type checking may help
+  you find errors early in development, reducing the number of testing
+  iterations).
+
+- Some project members (devs or management) don't like dynamic typing,
+  but others prefer dynamic typing and Python syntax. Mypy could be a
+  solution that everybody finds easy to accept.
+
+- You want to future-proof your project even if currently none of the
+  above really apply.
+
+Can I use mypy to type check my existing Python code?
+*****************************************************
+
+It depends. Compatibility is pretty good, but some Python features are
+not yet implemented or fully supported. The ultimate goal is to make
+using mypy practical for most Python code. Code that uses complex
+introspection or metaprogramming may be impractical to type check, but
+it should still be possible to use static typing in other parts of a
+program.
+
+Will static typing make my programs run faster?
+***********************************************
+
+Mypy only does static type checking and it does not improve
+performance. It has a minimal performance impact. In the future, there
+could be other tools that can compile statically typed mypy code to C
+modules or to efficient JVM bytecode, for example, but this is outside
+the scope of the mypy project. It may also be possible to modify
+existing Python VMs to take advantage of static type information, but
+whether this is feasible is still unknown. This is nontrivial since
+the runtime types do not necessarily correspond to the static types.
+
+How do I type check my Python 2 code?
+*************************************
+
+You can use a `comment-based function annotation syntax
+<https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code>`_
+and use the ``--py2`` command-line option to type check your Python 2 code.
+You'll also need to install ``typing`` for Python 2 via ``pip install typing``.
+
+Is mypy free?
+*************
+
+Yes. Mypy is free software, and it can also be used for commercial and
+proprietary projects. Mypy is available under the MIT license.
+
+Why not use structural subtyping?
+*********************************
+
+Mypy primarily uses `nominal subtyping
+<https://en.wikipedia.org/wiki/Nominative_type_system>`_ instead of
+`structural subtyping
+<https://en.wikipedia.org/wiki/Structural_type_system>`_. Some argue
+that structural subtyping is better suited for languages with duck
+typing such as Python.
+
+Here are some reasons why mypy uses nominal subtyping:
+
+1. It is easy to generate short and informative error messages when
+   using a nominal type system. This is especially important when
+   using type inference.
+
+2. The isinstance tests that Python supports are basically nominal,
+   and they are widely used in programs. It is not clear how to
+   support isinstance in a purely structural type system while
+   remaining compatible with Python idioms.
+
+3. Many programmers are already familiar with nominal subtyping and it
+   has been successfully used in languages such as Java, C++ and
+   C#. Only a few languages use structural subtyping.
+
+However, structural subtyping can also be useful. Structural subtyping
+is a likely feature to be added to mypy in the future, even though we
+expect that most mypy programs will still primarily use nominal
+subtyping.
+
+I like Python and I have no need for static typing
+**************************************************
+
+That wasn't really a question, was it? Mypy is not aimed at replacing
+Python. The goal is to give more options for Python programmers, to
+make Python a more competitive alternative to other statically typed
+languages in large projects, to improve programmer productivity and to
+improve software quality.
+
+How are mypy programs different from normal Python?
+***************************************************
+
+Since you use a vanilla Python implementation to run mypy programs,
+mypy programs are also Python programs. The type checker may give
+warnings for some valid Python code, but the code is still always
+runnable. Also, some Python features and syntax are still not
+supported by mypy, but this is gradually improving.
+
+The obvious difference is the availability of static type
+checking. The section :ref:`common_issues` mentions some
+modifications to Python code that may be required to make code type
+check without errors. Also, your code must make attributes explicit and
+use an explicit protocol representation. For example, you may want to
+subclass an Abstract Base Class such as ``typing.Iterable``.
+
+Mypy will support modular, efficient type checking, and this seems to
+rule out type checking some language features, such as arbitrary
+runtime addition of methods. However, it is likely that many of these
+features will be supported in a restricted form (for example, runtime
+modification is only supported for classes or methods registered as
+dynamic or 'patchable').
+
+How is mypy different from PyPy?
+********************************
+
+*This answer relates to PyPy as a Python implementation. See also the answer related to RPython below.*
+
+Mypy and PyPy are orthogonal. Mypy does static type checking, i.e. it
+is basically a linter, but static typing has no runtime effect,
+whereas PyPy is a Python implementation. You can use PyPy to run
+mypy programs.
+
+How is mypy different from Cython?
+**********************************
+
+`Cython <http://cython.org/>`_ is a variant of Python that supports
+compilation to CPython C modules. It can give major speedups to
+certain classes of programs compared to CPython, and it provides
+static typing (though this is different from mypy). Mypy differs in
+the following aspects, among others:
+
+- Cython is much more focused on performance than mypy. Mypy is only
+  about static type checking, and increasing performance is not a
+  direct goal.
+
+- The mypy syntax is arguably simpler and more "Pythonic" (no cdef/cpdef, etc.) for statically typed code.
+
+- The mypy syntax is compatible with Python. Mypy programs are normal
+  Python programs that can be run using any Python
+  implementation. Cython has many incompatible extensions to Python
+  syntax, and Cython programs generally cannot be run without first
+  compiling them to CPython extension modules via C. Cython also has a
+  pure Python mode, but it seems to support only a subset of Cython
+  functionality, and the syntax is quite verbose.
+
+- Mypy has a different set of type system features. For example, mypy
+  has genericity (parametric polymorphism), function types and
+  bidirectional type inference, which are not supported by
+  Cython. (Cython has fused types that are different but related to
+  mypy generics. Mypy also has a similar feature as an extension of
+  generics.)
+
+- The mypy type checker knows about the static types of many Python
+  stdlib modules and can effectively type check code that uses them.
+
+- Cython supports accessing C functions directly and many features are
+  defined in terms of translating them to C or C++. Mypy just uses
+  Python semantics, and mypy does not deal with accessing C library
+  functionality.
+
+How is mypy different from Nuitka?
+**********************************
+
+`Nuitka <http://nuitka.net/>`_ is a static compiler that can translate
+Python programs to C++. Nuitka integrates with the CPython
+runtime. Nuitka has additional future goals, such as using type
+inference and whole-program analysis to further speed up code. Here
+are some differences:
+
+- Nuitka is primarily focused on speeding up Python code. Mypy focuses
+  on static type checking and facilitating better tools.
+
+- Whole-program analysis tends to be slow and scale poorly to large or
+  complex programs. It is still unclear if Nuitka can solve these
+  issues. Mypy does not use whole-program analysis and will support
+  modular type checking (though this has not been implemented yet).
+
+How is mypy different from RPython or Shed Skin?
+************************************************
+
+`RPython <http://doc.pypy.org/en/latest/coding-guide.html>`_ and `Shed
+Skin <http://shed-skin.blogspot.co.uk/>`_ are basically statically
+typed subsets of Python. Mypy does the following important things
+differently:
+
+- RPython is primarily designed for implementing virtual machines;
+  mypy is a general-purpose tool.
+
+- Mypy supports both static and dynamic typing. Dynamically typed and
+  statically typed code can be freely mixed and can interact
+  seamlessly.
+
+- Mypy aims to support (in the future) fast and modular type
+  checking. Both RPython and Shed Skin use whole-program type
+  inference which is very slow, does not scale well to large programs
+  and often produces confusing error messages. Mypy can support
+  modularity since it only uses local type inference; static type
+  checking depends on having type annotations for functions
+  signatures.
+
+- Mypy will support introspection, dynamic loading of code and many
+  other dynamic language features (though using these may make static
+  typing less effective). RPython and Shed Skin only support a
+  restricted Python subset without several of these features.
+
+- Mypy supports user-defined generic types.
+
+Mypy is a cool project. Can I help?
+***********************************
+
+Any help is much appreciated! `Contact
+<http://www.mypy-lang.org/contact.html>`_ the developers if you would
+like to contribute. Any help related to development, design,
+publicity, documentation, testing, web site maintenance, financing,
+etc. can be helpful. You can learn a lot by contributing, and anybody
+can help, even beginners! However, some knowledge of compilers and/or
+type systems is essential if you want to work on mypy internals.
diff --git a/docs/source/function_overloading.rst b/docs/source/function_overloading.rst
new file mode 100644
index 0000000..b55cddd
--- /dev/null
+++ b/docs/source/function_overloading.rst
@@ -0,0 +1,60 @@
+Function overloading in stubs
+=============================
+
+Sometimes you have a library function that seems to call for two or
+more signatures.  That's okay -- you can define multiple *overloaded*
+instances of a function with the same name but different signatures in
+a stub file (this feature is not supported for user code, at least not
+yet) using the ``@overload`` decorator. For example, we can define an
+``abs`` function that works for both ``int`` and ``float`` arguments:
+
+.. code-block:: python
+
+   # This is a stub file!
+
+   from typing import overload
+
+   @overload
+   def abs(n: int) -> int: pass
+
+   @overload
+   def abs(n: float) -> float: pass
+
+Note that we can't use ``Union[int, float]`` as the argument type,
+since this wouldn't allow us to express that the return
+type depends on the argument type.
+
+Now if we import ``abs`` as defined in the above library stub, we can
+write code like this, and the types are inferred correctly:
+
+.. code-block:: python
+
+   n = abs(-2)     # 2 (int)
+   f = abs(-1.5)   # 1.5 (float)
+
+Overloaded function variants are still ordinary Python functions and
+they still define a single runtime object. The following code is
+thus valid:
+
+.. code-block:: python
+
+   my_abs = abs
+   my_abs(-2)      # 2 (int)
+   my_abs(-1.5)    # 1.5 (float)
+
+The overload variants must be adjacent in the code. This makes code
+clearer, as you don't have to hunt for overload variants across the
+file.
+
+.. note::
+
+   As generic type variables are erased at runtime when constructing
+   instances of generic types, an overloaded function cannot have
+   variants that only differ in a generic type argument,
+   e.g. ``List[int]`` versus ``List[str]``.
+
+.. note::
+
+   If you are writing a regular module rather than a stub, you can
+   often use a type variable with a value restriction to represent
+   functions as ``abs`` above (see :ref:`type-variable-value-restriction`).
diff --git a/docs/source/generics.rst b/docs/source/generics.rst
new file mode 100644
index 0000000..f6c0640
--- /dev/null
+++ b/docs/source/generics.rst
@@ -0,0 +1,390 @@
+Generics
+========
+
+Defining generic classes
+************************
+
+The built-in collection classes are generic classes. Generic types
+have one or more type parameters, which can be arbitrary types. For
+example, ``Dict[int, str]`` has the type parameters ``int`` and
+``str``, and ``List[int]`` has a type parameter ``int``.
+
+Programs can also define new generic classes. Here is a very simple
+generic class that represents a stack:
+
+.. code-block:: python
+
+   from typing import TypeVar, Generic, List
+
+   T = TypeVar('T')
+
+   class Stack(Generic[T]):
+       def __init__(self) -> None:
+           # Create an empty list with items of type T
+           self.items = []  # type: List[T]
+
+       def push(self, item: T) -> None:
+           self.items.append(item)
+
+       def pop(self) -> T:
+           return self.items.pop()
+
+       def empty(self) -> bool:
+           return not self.items
+
+The ``Stack`` class can be used to represent a stack of any type:
+``Stack[int]``, ``Stack[Tuple[int, str]]``, etc.
+
+Using ``Stack`` is similar to built-in container types:
+
+.. code-block:: python
+
+   # Construct an empty Stack[int] instance
+   stack = Stack[int]()
+   stack.push(2)
+   stack.pop()
+   stack.push('x')        # Type error
+
+Type inference works for user-defined generic types as well:
+
+.. code-block:: python
+
+   def process(stack: Stack[int]) -> None: ...
+
+   process(Stack())   # Argument has inferred type Stack[int]
+
+Construction of instances of generic types is also type checked:
+
+.. code-block:: python
+
+   class Box(Generic[T]):
+       def __init__(self, content: T) -> None:
+           self.content = content
+
+   Box(1)  # OK, inferred type is Box[int]
+   Box[int](1)  # Also OK
+   s = 'some string'
+   Box[int](s)  # Type error
+
+Generic class internals
+***********************
+
+You may wonder what happens at runtime when you index
+``Stack``. Actually, indexing ``Stack`` returns essentially a copy
+of ``Stack`` that returns instances of the original class on
+instantiation:
+
+>>> print(Stack)
+__main__.Stack
+>>> print(Stack[int])
+__main__.Stack[int]
+>>> print(Stack[int]().__class__)
+__main__.Stack
+
+Note that built-in types ``list``, ``dict`` and so on do not support
+indexing in Python. This is why we have the aliases ``List``, ``Dict``
+and so on in the ``typing`` module. Indexing these aliases gives
+you a class that directly inherits from the target class in Python:
+
+>>> from typing import List
+>>> List[int]
+typing.List[int]
+>>> List[int].__bases__
+(<class 'list'>, typing.MutableSequence)
+
+Generic types could be instantiated or subclassed as usual classes,
+but the above examples illustrate that type variables are erased at
+runtime. Generic ``Stack`` instances are just ordinary
+Python objects, and they have no extra runtime overhead or magic due
+to being generic, other than a metaclass that overloads the indexing
+operator.
+
+.. _generic-functions:
+
+Generic functions
+*****************
+
+Generic type variables can also be used to define generic functions:
+
+.. code-block:: python
+
+   from typing import TypeVar, Sequence
+
+   T = TypeVar('T')      # Declare type variable
+
+   def first(seq: Sequence[T]) -> T:   # Generic function
+       return seq[0]
+
+As with generic classes, the type variable can be replaced with any
+type. That means ``first`` can be used with any sequence type, and the
+return type is derived from the sequence item type. For example:
+
+.. code-block:: python
+
+   # Assume first defined as above.
+
+   s = first('foo')      # s has type str.
+   n = first([1, 2, 3])  # n has type int.
+
+Note also that a single definition of a type variable (such as ``T``
+above) can be used in multiple generic functions or classes. In this
+example we use the same type variable in two generic functions:
+
+.. code-block:: python
+
+   from typing import TypeVar, Sequence
+
+   T = TypeVar('T')      # Declare type variable
+
+   def first(seq: Sequence[T]) -> T:
+       return seq[0]
+
+   def last(seq: Sequence[T]) -> T:
+       return seq[-1]
+
+.. _generic-methods-and-generic-self:
+
+Generic methods and generic self
+********************************
+
+You can also define generic methods — just use a type variable in the
+method signature that is different from class type variables. In particular,
+``self`` may also be generic, allowing a method to return the most precise
+type known at the point of access.
+
+.. note::
+
+   This feature is experimental. Checking code with type annotations for self
+   arguments is still not fully implemented. Mypy may disallow valid code or
+   allow unsafe code.
+
+In this way, for example, you can typecheck chaining of setter methods:
+
+.. code-block:: python
+
+   from typing import TypeVar
+
+   T = TypeVar('T', bound='Shape')
+
+   class Shape:
+       def set_scale(self: T, scale: float) -> T:
+           self.scale = scale
+           return self
+
+   class Circle(Shape):
+       def set_radius(self, r: float) -> 'Circle':
+           self.radius = r
+           return self
+
+   class Square(Shape):
+       def set_width(self, w: float) -> 'Square':
+           self.width = w
+           return self
+
+   circle = Circle().set_scale(0.5).set_radius(2.7)  # type: Circle
+   square = Square().set_scale(0.5).set_width(3.2)  # type: Square
+
+Without using generic ``self``, the last two lines could not be type-checked properly.
+
+Other uses are factory methods, such as copy and deserialization.
+For class methods, you can also define generic ``cls``, using ``Type[T]``:
+
+.. code-block:: python
+
+   from typing import TypeVar, Tuple, Type
+
+   T = TypeVar('T', bound='Friend')
+
+   class Friend:
+       other = None  # type: Friend
+
+       @classmethod
+       def make_pair(cls: Type[T]) -> Tuple[T, T]:
+           a, b = cls(), cls()
+           a.other = b
+           b.other = a
+           return a, b
+
+   class SuperFriend(Friend):
+       pass
+
+   a, b = SuperFriend.make_pair()
+
+Note that when overriding a method with generic ``self``, you must either
+return a generic ``self`` too, or return an instance of the current class.
+In the latter case, you must implement this method in all future subclasses.
+
+Note also that mypy cannot always verify that the implementation of a copy
+or a deserialization method returns the actual type of self. Therefore
+you may need to silence mypy inside these methods (but not at the call site),
+possibly by making use of the ``Any`` type.
+
+.. _type-variable-value-restriction:
+
+Type variables with value restriction
+*************************************
+
+By default, a type variable can be replaced with any type. However, sometimes
+it's useful to have a type variable that can only have some specific types
+as its value. A typical example is a type variable that can only have values
+``str`` and ``bytes``:
+
+.. code-block:: python
+
+   from typing import TypeVar
+
+   AnyStr = TypeVar('AnyStr', str, bytes)
+
+This is actually such a common type variable that ``AnyStr`` is
+defined in ``typing`` and we don't need to define it ourselves.
+
+We can use ``AnyStr`` to define a function that can concatenate
+two strings or bytes objects, but it can't be called with other
+argument types:
+
+.. code-block:: python
+
+   from typing import AnyStr
+
+   def concat(x: AnyStr, y: AnyStr) -> AnyStr:
+       return x + y
+
+   concat('a', 'b')    # Okay
+   concat(b'a', b'b')  # Okay
+   concat(1, 2)        # Error!
+
+Note that this is different from a union type, since combinations
+of ``str`` and ``bytes`` are not accepted:
+
+.. code-block:: python
+
+   concat('string', b'bytes')   # Error!
+
+In this case, this is exactly what we want, since it's not possible
+to concatenate a string and a bytes object! The type checker
+will reject this function:
+
+.. code-block:: python
+
+   def union_concat(x: Union[str, bytes], y: Union[str, bytes]) -> Union[str, bytes]:
+       return x + y  # Error: can't concatenate str and bytes
+
+Another interesting special case is calling ``concat()`` with a
+subtype of ``str``:
+
+.. code-block:: python
+
+    class S(str): pass
+
+    ss = concat(S('foo'), S('bar'))
+
+You may expect that the type of ``ss`` is ``S``, but the type is
+actually ``str``: a subtype gets promoted to one of the valid values
+for the type variable, which in this case is ``str``. This is thus
+subtly different from *bounded quantification* in languages such as
+Java, where the return type would be ``S``. The way mypy implements
+this is correct for ``concat``, since ``concat`` actually returns a
+``str`` instance in the above example:
+
+.. code-block:: python
+
+    >>> print(type(ss))
+    <class 'str'>
+
+You can also use a ``TypeVar`` with a restricted set of possible
+values when defining a generic class. For example, mypy uses the type
+``typing.Pattern[AnyStr]`` for the return value of ``re.compile``,
+since regular expressions can be based on a string or a bytes pattern.
+
+.. _type-variable-upper-bound:
+
+Type variables with upper bounds
+********************************
+
+A type variable can also be restricted to having values that are
+subtypes of a specific type. This type is called the upper bound of
+the type variable, and is specified with the ``bound=...`` keyword
+argument to ``TypeVar``.
+
+.. code-block:: python
+
+   from typing import TypeVar, SupportsAbs
+
+   T = TypeVar('T', bound=SupportsAbs[float])
+
+In the definition of a generic function that uses such a type variable
+``T``, the type represented by ``T`` is assumed to be a subtype of
+its upper bound, so the function can use methods of the upper bound on
+values of type ``T``.
+
+.. code-block:: python
+
+   def largest_in_absolute_value(*xs: T) -> T:
+       return max(xs, key=abs)  # Okay, because T is a subtype of SupportsAbs[float].
+
+In a call to such a function, the type ``T`` must be replaced by a
+type that is a subtype of its upper bound. Continuing the example
+above,
+
+.. code-block:: python
+
+   largest_in_absolute_value(-3.5, 2)   # Okay, has type float.
+   largest_in_absolute_value(5+6j, 7)   # Okay, has type complex.
+   largest_in_absolute_value('a', 'b')  # Error: 'str' is not a subtype of SupportsAbs[float].
+
+Type parameters of generic classes may also have upper bounds, which
+restrict the valid values for the type parameter in the same way.
+
+A type variable may not have both a value restriction (see
+:ref:`type-variable-value-restriction`) and an upper bound.
+
+.. _declaring-decorators:
+
+Declaring decorators
+********************
+
+One common application of type variable upper bounds is in declaring a
+decorator that preserves the signature of the function it decorates,
+regardless of that signature. Here's a complete example:
+
+.. code-block:: python
+
+   from typing import Any, Callable, TypeVar, Tuple, cast
+
+   FuncType = Callable[..., Any]
+   F = TypeVar('F', bound=FuncType)
+
+   # A decorator that preserves the signature.
+   def my_decorator(func: F) -> F:
+       def wrapper(*args, **kwds):
+           print("Calling", func)
+           return func(*args, **kwds)
+       return cast(F, wrapper)
+
+   # A decorated function.
+   @my_decorator
+   def foo(a: int) -> str:
+       return str(a)
+
+   # Another.
+   @my_decorator
+   def bar(x: float, y: float) -> Tuple[float, float, bool]:
+       return (x, y, x > y)
+
+   a = foo(12)
+   reveal_type(a)  # str
+   b = bar(3.14, 0)
+   reveal_type(b)  # Tuple[float, float, bool]
+   foo('x')    # Type check error: incompatible type "str"; expected "int"
+
+From the final block we see that the signatures of the decorated
+functions ``foo()`` and ``bar()`` are the same as those of the original
+functions (before the decorator is applied).
+
+The bound on ``F`` is used so that calling the decorator on a
+non-function (e.g. ``my_decorator(1)``) will be rejected.
+
+Also note that the ``wrapper()`` function is not type-checked. Wrapper
+functions are typically small enough that this is not a big
+problem. This is also the reason for the ``cast()`` call in the
+``return`` statement in ``my_decorator()``. See :ref:`casts`.
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
new file mode 100644
index 0000000..a41c125
--- /dev/null
+++ b/docs/source/getting_started.rst
@@ -0,0 +1,24 @@
+.. _getting-started:
+
+Getting started
+===============
+
+Installation
+************
+
+Mypy requires Python 3.3 or later.  Once you've `installed Python 3 <https://www.python.org/downloads/>`_, you can install mypy with:
+
+.. code-block:: text
+
+    $ python3 -m pip install mypy
+
+Installing from source
+**********************
+
+To install mypy from source, clone the github repository and then run pip install locally:
+
+.. code-block:: text
+
+    $ git clone https://github.com/python/mypy.git
+    $ cd mypy
+    $ sudo python3 -m pip install --upgrade .
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 0000000..90cc749
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,42 @@
+.. Mypy documentation master file, created by
+   sphinx-quickstart on Sun Sep 14 19:50:35 2014.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to Mypy documentation!
+==============================
+
+Mypy is a static type checker for Python.
+
+.. toctree::
+   :maxdepth: 2
+
+   introduction
+   basics
+   getting_started
+   builtin_types
+   python2
+   type_inference_and_annotations
+   kinds_of_types
+   class_basics
+   dynamic_typing
+   function_overloading
+   casts
+   duck_type_compatibility
+   common_issues
+   generics
+   supported_python_features
+   additional_features
+   command_line
+   config_file
+   python36
+   faq
+   cheat_sheet
+   cheat_sheet_py3
+   revision_history
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`search`
diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst
new file mode 100644
index 0000000..3bcd0ad
--- /dev/null
+++ b/docs/source/introduction.rst
@@ -0,0 +1,30 @@
+Introduction
+============
+
+Mypy is a static type checker for Python. If you sprinkle your code
+with type annotations, mypy can type check your code and find common bugs.
+As mypy is a static analyzer, or a lint-like tool, your code's type
+annotations are just hints and don't interfere when running your program.
+You run your program with a standard Python interpreter, and the annotations
+are treated primarily as comments.
+
+Using the Python 3 function annotation syntax (using the PEP 484 notation) or
+a comment-based annotation syntax for Python 2 code, you will be able to
+efficiently annotate your code and use mypy to check the code for common
+errors. Mypy has a powerful, easy-to-use, type system with modern features
+such as type inference, generics, function types, tuple types and
+union types.
+
+As a developer, you decide how to use mypy in your workflow. You can always
+escape to dynamic typing as mypy's approach to static typing doesn't restrict
+what you can do in your programs. Using mypy will make your programs easier to
+debug, maintain, and understand.
+
+This documentation provides a short introduction to mypy. It will help you
+get started writing statically typed code. Knowledge of Python and a
+statically typed object-oriented language, such as Java, are assumed.
+
+.. note::
+
+   Mypy is still experimental. There will be changes
+   that break backward compatibility.
diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst
new file mode 100644
index 0000000..dc639d9
--- /dev/null
+++ b/docs/source/kinds_of_types.rst
@@ -0,0 +1,1002 @@
+Kinds of types
+==============
+
+User-defined types
+******************
+
+Each class is also a type. Any instance of a subclass is also
+compatible with all superclasses. All values are compatible with the
+``object`` type (and also the ``Any`` type).
+
+.. code-block:: python
+
+   class A:
+       def f(self) -> int:        # Type of self inferred (A)
+           return 2
+
+   class B(A):
+       def f(self) -> int:
+            return 3
+       def g(self) -> int:
+           return 4
+
+   a = B() # type: A  # OK (explicit type for a; override type inference)
+   print(a.f())       # 3
+   a.g()              # Type check error: A has no method g
+
+The Any type
+************
+
+A value with the ``Any`` type is dynamically typed. Mypy doesn't know
+anything about the possible runtime types of such value. Any
+operations are permitted on the value, and the operations are checked
+at runtime, similar to normal Python code without type annotations.
+
+``Any`` is compatible with every other type, and vice versa. No
+implicit type check is inserted when assigning a value of type ``Any``
+to a variable with a more precise type:
+
+.. code-block:: python
+
+   a = None  # type: Any
+   s = ''    # type: str
+   a = 2     # OK
+   s = a     # OK
+
+Declared (and inferred) types are *erased* at runtime. They are
+basically treated as comments, and thus the above code does not
+generate a runtime error, even though ``s`` gets an ``int`` value when
+the program is run. Note that the declared type of ``s`` is actually
+``str``!
+
+If you do not define a function return value or argument types, these
+default to ``Any``:
+
+.. code-block:: python
+
+   def show_heading(s) -> None:
+       print('=== ' + s + ' ===')  # No static type checking, as s has type Any
+
+   show_heading(1)  # OK (runtime error only; mypy won't generate an error)
+
+You should give a statically typed function an explicit ``None``
+return type even if it doesn't return a value, as this lets mypy catch
+additional type errors:
+
+.. code-block:: python
+
+   def wait(t: float):  # Implicit Any return value
+       print('Waiting...')
+       time.sleep(t)
+
+   if wait(2) > 1:   # Mypy doesn't catch this error!
+       ...
+
+If we had used an explicit ``None`` return type, mypy would have caught
+the error:
+
+.. code-block:: python
+
+   def wait(t: float) -> None:
+       print('Waiting...')
+       time.sleep(t)
+
+   if wait(2) > 1:   # Error: can't compare None and int
+       ...
+
+The ``Any`` type is discussed in more detail in section :ref:`dynamic_typing`.
+
+.. note::
+
+  A function without any types in the signature is dynamically
+  typed. The body of a dynamically typed function is not checked
+  statically, and local variables have implicit ``Any`` types.
+  This makes it easier to migrate legacy Python code to mypy, as
+  mypy won't complain about dynamically typed functions.
+
+.. _tuple-types:
+
+Tuple types
+***********
+
+The type ``Tuple[T1, ..., Tn]`` represents a tuple with the item types ``T1``, ..., ``Tn``:
+
+.. code-block:: python
+
+   def f(t: Tuple[int, str]) -> None:
+       t = 1, 'foo'    # OK
+       t = 'foo', 1    # Type check error
+
+A tuple type of this kind has exactly a specific number of items (2 in
+the above example). Tuples can also be used as immutable,
+varying-length sequences. You can use the type ``Tuple[T, ...]`` (with
+a literal ``...`` -- it's part of the syntax) for this
+purpose. Example:
+
+.. code-block:: python
+
+    def print_squared(t: Tuple[int, ...]) -> None:
+        for n in t:
+            print(n, n ** 2)
+
+    print_squared(())           # OK
+    print_squared((1, 3, 5))    # OK
+    print_squared([1, 2])       # Error: only a tuple is valid
+
+.. note::
+
+   Usually it's a better idea to use ``Sequence[T]`` instead of ``Tuple[T, ...]``, as
+   ``Sequence`` is also compatible with lists and other non-tuple sequences.
+
+.. note::
+
+   ``Tuple[...]`` is not valid as a base class outside stub files. This is a
+   limitation of the ``typing`` module. One way to work around
+   this is to use a named tuple as a base class (see section :ref:`named-tuples`).
+
+.. _callable-types:
+
+Callable types (and lambdas)
+****************************
+
+You can pass around function objects and bound methods in statically
+typed code. The type of a function that accepts arguments ``A1``, ..., ``An``
+and returns ``Rt`` is ``Callable[[A1, ..., An], Rt]``. Example:
+
+.. code-block:: python
+
+   from typing import Callable
+
+   def twice(i: int, next: Callable[[int], int]) -> int:
+       return next(next(i))
+
+   def add(i: int) -> int:
+       return i + 1
+
+   print(twice(3, add))   # 5
+
+You can only have positional arguments, and only ones without default
+values, in callable types. These cover the vast majority of uses of
+callable types, but sometimes this isn't quite enough. Mypy recognizes
+a special form ``Callable[..., T]`` (with a literal ``...``) which can
+be used in less typical cases. It is compatible with arbitrary
+callable objects that return a type compatible with ``T``, independent
+of the number, types or kinds of arguments. Mypy lets you call such
+callable values with arbitrary arguments, without any checking -- in
+this respect they are treated similar to a ``(*args: Any, **kwargs:
+Any)`` function signature. Example:
+
+.. code-block:: python
+
+   from typing import Callable
+
+   def arbitrary_call(f: Callable[..., int]) -> int:
+       return f('x') + f(y=2)  # OK
+
+   arbitrary_call(ord)   # No static error, but fails at runtime
+   arbitrary_call(open)  # Error: does not return an int
+   arbitrary_call(1)     # Error: 'int' is not callable
+
+Lambdas are also supported. The lambda argument and return value types
+cannot be given explicitly; they are always inferred based on context
+using bidirectional type inference:
+
+.. code-block:: python
+
+   l = map(lambda x: x + 1, [1, 2, 3])   # Infer x as int and l as List[int]
+
+If you want to give the argument or return value types explicitly, use
+an ordinary, perhaps nested function definition.
+
+.. _union-types:
+
+Union types
+***********
+
+Python functions often accept values of two or more different
+types. You can use overloading to model this in statically typed code,
+but union types can make code like this easier to write.
+
+Use the ``Union[T1, ..., Tn]`` type constructor to construct a union
+type. For example, the type ``Union[int, str]`` is compatible with
+both integers and strings. You can use an ``isinstance()`` check to
+narrow down the type to a specific type:
+
+.. code-block:: python
+
+   from typing import Union
+
+   def f(x: Union[int, str]) -> None:
+       x + 1     # Error: str + int is not valid
+       if isinstance(x, int):
+           # Here type of x is int.
+           x + 1      # OK
+       else:
+           # Here type of x is str.
+           x + 'a'    # OK
+
+   f(1)    # OK
+   f('x')  # OK
+   f(1.1)  # Error
+
+.. _optional:
+
+The type of None and optional types
+***********************************
+
+Mypy treats the type of ``None`` as special. ``None`` is a valid value
+for every type, which resembles ``null`` in Java. Unlike Java, mypy
+doesn't treat primitive types
+specially: ``None`` is also valid for primitive types such as ``int``
+and ``float``.
+
+.. note::
+
+   See :ref:`strict_optional` for an experimental mode which allows
+   mypy to check ``None`` values precisely.
+
+When initializing a variable as ``None``, ``None`` is usually an
+empty place-holder value, and the actual value has a different type.
+This is why you need to annotate an attribute in a case like this:
+
+.. code-block:: python
+
+    class A:
+        def __init__(self) -> None:
+            self.count = None  # type: int
+
+Mypy will complain if you omit the type annotation, as it wouldn't be
+able to infer a non-trivial type for the ``count`` attribute
+otherwise.
+
+Mypy generally uses the first assignment to a variable to
+infer the type of the variable. However, if you assign both a ``None``
+value and a non-``None`` value in the same scope, mypy can often do
+the right thing:
+
+.. code-block:: python
+
+   def f(i: int) -> None:
+       n = None  # Inferred type int because of the assignment below
+       if i > 0:
+            n = i
+       ...
+
+Often it's useful to know whether a variable can be
+``None``. For example, this function accepts a ``None`` argument,
+but it's not obvious from its signature:
+
+.. code-block:: python
+
+    def greeting(name: str) -> str:
+        if name:
+            return 'Hello, {}'.format(name)
+        else:
+            return 'Hello, stranger'
+
+    print(greeting('Python'))  # Okay!
+    print(greeting(None))      # Also okay!
+
+Mypy lets you use ``Optional[t]`` to document that ``None`` is a
+valid argument type:
+
+.. code-block:: python
+
+    from typing import Optional
+
+    def greeting(name: Optional[str]) -> str:
+        if name:
+            return 'Hello, {}'.format(name)
+        else:
+            return 'Hello, stranger'
+
+Mypy treats this as semantically equivalent to the previous example,
+since ``None`` is implicitly valid for any type, but it's much more
+useful for a programmer who is reading the code. You can equivalently
+use ``Union[str, None]``, but ``Optional`` is shorter and more
+idiomatic.
+
+.. note::
+
+    ``None`` is also used as the return type for functions that don't
+    return a value, i.e. that implicitly return ``None``. Mypy doesn't
+    use ``NoneType`` for this, since it would
+    look awkward, even though that is the real name of the type of ``None``
+    (try ``type(None)`` in the interactive interpreter to see for yourself).
+
+.. _strict_optional:
+
+Experimental strict optional type and None checking
+***************************************************
+
+Currently, ``None`` is a valid value for each type, similar to
+``null`` or ``NULL`` in many languages. However, you can use the
+experimental ``--strict-optional`` command line option to tell mypy
+that types should not include ``None``
+by default. The ``Optional`` type modifier is then used to define
+a type variant that includes ``None``, such as ``Optional[int]``:
+
+.. code-block:: python
+
+   from typing import Optional
+
+   def f() -> Optional[int]:
+       return None  # OK
+
+   def g() -> int:
+       ...
+       return None  # Error: None not compatible with int
+
+Also, most operations will not be allowed on unguarded ``None``
+or ``Optional`` values:
+
+.. code-block:: python
+
+   def f(x: Optional[int]) -> int:
+       return x + 1  # Error: Cannot add None and int
+
+Instead, an explicit ``None`` check is required. Mypy has
+powerful type inference that lets you use regular Python
+idioms to guard against ``None`` values. For example, mypy
+recognizes ``is None`` checks:
+
+.. code-block:: python
+
+   def f(x: Optional[int]) -> int:
+       if x is None:
+           return 0
+       else:
+           # The inferred type of x is just int here.
+           return x + 1
+
+Mypy will infer the type of ``x`` to be ``int`` in the else block due to the
+check against ``None`` in the if condition.
+
+.. note::
+
+    ``--strict-optional`` is experimental and still has known issues.
+
+Class name forward references
+*****************************
+
+Python does not allow references to a class object before the class is
+defined. Thus this code does not work as expected:
+
+.. code-block:: python
+
+   def f(x: A) -> None:  # Error: Name A not defined
+       ...
+
+   class A:
+       ...
+
+In cases like these you can enter the type as a string literal — this
+is a *forward reference*:
+
+.. code-block:: python
+
+   def f(x: 'A') -> None:  # OK
+       ...
+
+   class A:
+       ...
+
+Of course, instead of using a string literal type, you could move the
+function definition after the class definition. This is not always
+desirable or even possible, though.
+
+Any type can be entered as a string literal, and you can combine
+string-literal types with non-string-literal types freely:
+
+.. code-block:: python
+
+   def f(a: List['A']) -> None: ...  # OK
+   def g(n: 'int') -> None: ...      # OK, though not useful
+
+   class A: pass
+
+String literal types are never needed in ``# type:`` comments.
+
+String literal types must be defined (or imported) later *in the same
+module*.  They cannot be used to leave cross-module references
+unresolved.  (For dealing with import cycles, see
+:ref:`import-cycles`.)
+
+.. _type-aliases:
+
+Type aliases
+************
+
+In certain situations, type names may end up being long and painful to type:
+
+.. code-block:: python
+
+   def f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]:
+       ...
+
+When cases like this arise, you can define a type alias by simply
+assigning the type to a variable:
+
+.. code-block:: python
+
+   AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]
+
+   # Now we can use AliasType in place of the full name:
+
+   def f() -> AliasType:
+       ...
+
+Type aliases can be generic. In this case they can be used in two ways:
+Subscripted aliases are equivalent to original types with substituted type
+variables; the number of type arguments must match the number of free type variables
+in the generic type alias. Unsubscripted aliases are treated as original types with free
+variables replaced with ``Any``. Examples (following `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#type-aliases>`_):
+
+.. code-block:: python
+
+    from typing import TypeVar, Iterable, Tuple, Union, Callable
+    S = TypeVar('S')
+    TInt = Tuple[int, S]
+    UInt = Union[S, int]
+    CBack = Callable[..., S]
+
+    def response(query: str) -> UInt[str]:  # Same as Union[str, int]
+        ...
+    def activate(cb: CBack[S]) -> S:        # Same as Callable[..., S]
+        ...
+    table_entry: TInt  # Same as Tuple[int, Any]
+
+    T = TypeVar('T', int, float, complex)
+    Vec = Iterable[Tuple[T, T]]
+
+    def inproduct(v: Vec[T]) -> T:
+        return sum(x*y for x, y in v)
+
+    def dilate(v: Vec[T], scale: T) -> Vec[T]:
+        return ((x * scale, y * scale) for x, y in v)
+
+    v1: Vec[int] = []      # Same as Iterable[Tuple[int, int]]
+    v2: Vec = []           # Same as Iterable[Tuple[Any, Any]]
+    v3: Vec[int, int] = [] # Error: Invalid alias, too many type arguments!
+
+Type aliases can be imported from modules like any other names. Aliases can target
+other aliases (although building complex chains of aliases is not recommended, as this
+impedes code readability, thus defeating the purpose of using aliases).
+Following previous examples:
+
+.. code-block:: python
+
+    from typing import TypeVar, Generic, Optional
+    from first_example import AliasType
+    from second_example import Vec
+
+    def fun() -> AliasType:
+        ...
+
+    T = TypeVar('T')
+    class NewVec(Generic[T], Vec[T]):
+        ...
+    for i, j in NewVec[int]():
+        ...
+
+    OIntVec = Optional[Vec[int]]
+
+.. note::
+
+    A type alias does not create a new type. It's just a shorthand notation for
+    another type -- it's equivalent to the target type. For generic type aliases
+    this means that variance of type variables used for alias definition does not
+    apply to aliases. A parameterized generic alias is treated simply as an original
+    type with the corresponding type variables substituted.
+
+.. _newtypes:
+
+NewTypes
+********
+
+(Freely after `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#newtype-helper-function>`_.)
+
+There are also situations where a programmer might want to avoid logical errors by
+creating simple classes. For example:
+
+.. code-block:: python
+
+    class UserId(int):
+        pass
+
+    def get_by_user_id(user_id: UserId):
+        ...
+
+However, this approach introduces some runtime overhead. To avoid this, the typing
+module provides a helper function ``NewType`` that creates simple unique types with
+almost zero runtime overhead. Mypy will treat the statement
+``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following
+definition:
+
+.. code-block:: python
+
+    class Derived(Base):
+        def __init__(self, _x: Base) -> None:
+            ...
+
+However, at runtime, ``NewType('Derived', Base)`` will return a dummy function that
+simply returns its argument:
+
+.. code-block:: python
+
+    def Derived(_x):
+        return _x
+
+Mypy will require explicit casts from ``int`` where ``UserId`` is expected, while
+implicitly casting from ``UserId`` where ``int`` is expected. Examples:
+
+.. code-block:: python
+
+    from typing import NewType
+
+    UserId = NewType('UserId', int)
+
+    def name_by_id(user_id: UserId) -> str:
+        ...
+
+    UserId('user')          # Fails type check
+
+    name_by_id(42)          # Fails type check
+    name_by_id(UserId(42))  # OK
+
+    num = UserId(5) + 1     # type: int
+
+``NewType`` accepts exactly two arguments. The first argument must be a string literal
+containing the name of the new type and must equal the name of the variable to which the new
+type is assigned. The second argument must be a properly subclassable class, i.e.,
+not a type construct like ``Union``, etc.
+
+The function returned by ``NewType`` accepts only one argument; this is equivalent to
+supporting only one constructor accepting an instance of the base class (see above).
+Example:
+
+.. code-block:: python
+
+    from typing import NewType
+
+    class PacketId:
+        def __init__(self, major: int, minor: int) -> None:
+            self._major = major
+            self._minor = minor
+
+    TcpPacketId = NewType('TcpPacketId', PacketId)
+
+    packet = PacketId(100, 100)
+    tcp_packet = TcpPacketId(packet)  # OK
+
+    tcp_packet = TcpPacketId(127, 0)  # Fails in type checker and at runtime
+
+Both ``isinstance`` and ``issubclass``, as well as subclassing, will fail for
+``NewType('Derived', Base)`` since function objects don't support these operations.
+
+.. note::
+
+    Note that unlike type aliases, ``NewType`` will create an entirely new and
+    unique type when used. The intended purpose of ``NewType`` is to help you
+    detect cases where you accidentally mixed together the old base type and the
+    new derived type.
+
+    For example, the following will successfully typecheck when using type
+    aliases:
+
+    .. code-block:: python
+
+        UserId = int
+
+        def name_by_id(user_id: UserId) -> str:
+            ...
+
+        name_by_id(3)  # ints and UserId are synonymous
+
+    But a similar example using ``NewType`` will not typecheck:
+
+    .. code-block:: python
+
+        from typing import NewType
+
+        UserId = NewType('UserId', int)
+
+        def name_by_id(user_id: UserId) -> str:
+            ...
+
+        name_by_id(3)  # int is not the same as UserId
+
+.. _named-tuples:
+
+Named tuples
+************
+
+Mypy recognizes named tuples and can type check code that defines or
+uses them.  In this example, we can detect code trying to access a
+missing attribute:
+
+.. code-block:: python
+
+    Point = namedtuple('Point', ['x', 'y'])
+    p = Point(x=1, y=2)
+    print(p.z)  # Error: Point has no attribute 'z'
+
+If you use ``namedtuple`` to define your named tuple, all the items
+are assumed to have ``Any`` types. That is, mypy doesn't know anything
+about item types. You can use ``typing.NamedTuple`` to also define
+item types:
+
+.. code-block:: python
+
+    from typing import NamedTuple
+
+    Point = NamedTuple('Point', [('x', int),
+                                 ('y', int)])
+    p = Point(x=1, y='x')  # Argument has incompatible type "str"; expected "int"
+
+Python 3.6 will have an alternative, class-based syntax for named tuples with types.
+Mypy supports it already:
+
+.. code-block:: python
+
+    from typing import NamedTuple
+
+    class Point(NamedTuple):
+        x: int
+        y: int
+
+    p = Point(x=1, y='x')  # Argument has incompatible type "str"; expected "int"
+
+.. note::
+
+   The Python 3.6 syntax requires the ``--fast-parser`` flag. You must also have the
+   `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package
+   installed and have at least version 0.6.1.  Use ``pip3 install -U typed_ast``.
+
+.. _type-of-class:
+
+The type of class objects
+*************************
+
+(Freely after `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.)
+
+Sometimes you want to talk about class objects that inherit from a
+given class.  This can be spelled as ``Type[C]`` where ``C`` is a
+class.  In other words, when ``C`` is the name of a class, using ``C``
+to annotate an argument declares that the argument is an instance of
+``C`` (or of a subclass of ``C``), but using ``Type[C]`` as an
+argument annotation declares that the argument is a class object
+deriving from ``C`` (or ``C`` itself).
+
+For example, assume the following classes:
+
+.. code-block:: python
+
+   class User:
+       # Defines fields like name, email
+
+   class BasicUser(User):
+       def upgrade(self):
+           """Upgrade to Pro"""
+
+   class ProUser(User):
+       def pay(self):
+           """Pay bill"""
+
+Note that ``ProUser`` doesn't inherit from ``BasicUser``.
+
+Here's a function that creates an instance of one of these classes if
+you pass it the right class object:
+
+.. code-block:: python
+
+   def new_user(user_class):
+       user = user_class()
+       # (Here we could write the user object to a database)
+       return user
+
+How would we annotate this function?  Without ``Type[]`` the best we
+could do would be:
+
+.. code-block:: python
+
+   def new_user(user_class: type) -> User:
+       # Same implementation as before
+
+This seems reasonable, except that in the following example, mypy
+doesn't see that the ``buyer`` variable has type ``ProUser``:
+
+.. code-block:: python
+
+   buyer = new_user(ProUser)
+   buyer.pay()  # Rejected, not a method on User
+
+However, using ``Type[]`` and a type variable with an upper bound (see
+:ref:`type-variable-upper-bound`) we can do better:
+
+.. code-block:: python
+
+   U = TypeVar('U', bound=User)
+
+   def new_user(user_class: Type[U]) -> U:
+       # Same implementation as before
+
+Now mypy will infer the correct type of the result when we call
+``new_user()`` with a specific subclass of ``User``:
+
+.. code-block:: python
+
+   beginner = new_user(BasicUser)  # Inferred type is BasicUser
+   beginner.upgrade()  # OK
+
+.. note::
+
+   The value corresponding to ``Type[C]`` must be an actual class
+   object that's a subtype of ``C``.  Its constructor must be
+   compatible with the constructor of ``C``.  If ``C`` is a type
+   variable, its upper bound must be a class object.
+
+For more details about ``Type[]`` see `PEP 484
+<https://www.python.org/dev/peps/pep-0484/#the-type-of-class-objects>`_.
+
+.. _text-and-anystr:
+
+Text and AnyStr
+***************
+
+Sometimes you may want to write a function which will accept only unicode
+strings. This can be challenging to do in a codebase intended to run in
+both Python 2 and Python 3 since ``str`` means something different in both
+versions and ``unicode`` is not a keyword in Python 3.
+
+To help solve this issue, use ``typing.Text`` which is aliased to
+``unicode`` in Python 2 and to ``str`` in Python 3. This allows you to
+indicate that a function should accept only unicode strings in a
+cross-compatible way:
+
+.. code-block:: python
+
+   from typing import Text
+
+   def unicode_only(s: Text) -> Text:
+       return s + u'\u2713'
+
+In other cases, you may want to write a function that will work with any
+kind of string but will not let you mix two different string types. To do
+so use ``typing.AnyStr``:
+
+.. code-block:: python
+
+   from typing import AnyStr
+
+   def concat(x: AnyStr, y: AnyStr) -> AnyStr:
+       return x + y
+
+   concat('a', 'b')     # Okay
+   concat(b'a', b'b')   # Okay
+   concat('a', b'b')    # Error: cannot mix bytes and unicode
+
+For more details, see :ref:`type-variable-value-restriction`.
+
+.. note::
+
+   How ``bytes``, ``str``, and ``unicode`` are handled between Python 2 and
+   Python 3 may change in future versions of mypy.
+
+.. _generators:
+
+Generators
+**********
+
+A basic generator that only yields values can be annotated as having a return
+type of either ``Iterator[YieldType]`` or ``Iterable[YieldType]``. For example:
+
+.. code-block:: python
+
+   def squares(n: int) -> Iterator[int]:
+       for i in range(n):
+           yield i * i
+
+If you want your generator to accept values via the ``send`` method or return
+a value, you should use the
+``Generator[YieldType, SendType, ReturnType]`` generic type instead. For example:
+
+.. code-block:: python
+
+   def echo_round() -> Generator[int, float, str]:
+       sent = yield 0
+       while sent >= 0:
+           sent = yield round(sent)
+       return 'Done'
+
+Note that unlike many other generics in the typing module, the ``SendType`` of
+``Generator`` behaves contravariantly, not covariantly or invariantly.
+
+If you do not plan on receiving or returning values, then set the ``SendType``
+or ``ReturnType`` to ``None``, as appropriate. For example, we could have
+annotated the first example as the following:
+
+.. code-block:: python
+
+   def squares(n: int) -> Generator[int, None, None]:
+       for i in range(n):
+           yield i * i
+
+.. _async-and-await:
+
+Typing async/await
+******************
+
+.. note::
+
+   Currently, you must pass in the ``--fast-parser`` flag if you want to run
+   mypy against code containing the ``async/await`` keywords. The fast parser
+   will be enabled by default in a future version of mypy.
+
+   Note that mypy will understand coroutines created using the ``@asyncio.coroutine``
+   decorator both with and without the fast parser enabled.
+
+Mypy supports the ability to type coroutines that use the ``async/await``
+syntax introduced in Python 3.5. For more information regarding coroutines and
+this new syntax, see `PEP 492 <https://www.python.org/dev/peps/pep-0492/>`_.
+
+Functions defined using ``async def`` are typed just like normal functions.
+The return type annotation should be the same as the type of the value you
+expect to get back when ``await``-ing the coroutine.
+
+.. code-block:: python
+
+   import asyncio
+
+   async def format_string(tag: str, count: int) -> str:
+       return 'T-minus {} ({})'.format(count, tag)
+
+   async def countdown_1(tag: str, count: int) -> str:
+       while count > 0:
+           my_str = await format_string(tag, count)  # has type 'str'
+           print(my_str)
+           await asyncio.sleep(0.1)
+           count -= 1
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_1("Millennium Falcon", 5))
+   loop.close()
+
+The result of calling an ``async def`` function *without awaiting* will be a
+value of type ``Awaitable[T]``:
+
+.. code-block:: python
+
+   my_coroutine = countdown_1("Millennium Falcon", 5)
+   reveal_type(my_coroutine)  # has type 'Awaitable[str]'
+
+If you want to use coroutines in older versions of Python that do not support
+the ``async def`` syntax, you can instead use the ``@asyncio.coroutine``
+decorator to convert a generator into a coroutine.
+
+Note that we set the ``YieldType`` of the generator to be ``Any`` in the
+following example. This is because the exact yield type is an implementation
+detail of the coroutine runner (e.g. the ``asyncio`` event loop) and your
+coroutine shouldn't have to know or care about what precisely that type is.
+
+.. code-block:: python
+
+   from typing import Any, Generator
+   import asyncio
+
+   @asyncio.coroutine
+   def countdown_2(tag: str, count: int) -> Generator[Any, None, str]:
+       while count > 0:
+           print('T-minus {} ({})'.format(count, tag))
+           yield from asyncio.sleep(0.1)
+           count -= 1
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_2("USS Enterprise", 5))
+   loop.close()
+
+As before, the result of calling a generator decorated with ``@asyncio.coroutine``
+will be a value of type ``Awaitable[T]``.
+
+.. note::
+
+   At runtime, you are allowed to add the ``@asyncio.coroutine`` decorator to
+   both functions and generators. This is useful when you want to mark a
+   work-in-progress function as a coroutine, but have not yet added ``yield`` or
+   ``yield from`` statements:
+
+   .. code-block:: python
+
+      import asyncio
+
+      @asyncio.coroutine
+      def serialize(obj: object) -> str:
+          # todo: add yield/yield from to turn this into a generator
+          return "placeholder"
+
+   However, mypy currently does not support converting functions into
+   coroutines. Support for this feature will be added in a future version, but
+   for now, you can manually force the function to be a generator by doing
+   something like this:
+
+   .. code-block:: python
+
+      from typing import Generator
+      import asyncio
+
+      @asyncio.coroutine
+      def serialize(obj: object) -> Generator[None, None, str]:
+          # todo: add yield/yield from to turn this into a generator
+          if False:
+              yield
+          return "placeholder"
+
+You may also choose to create a subclass of ``Awaitable`` instead:
+
+.. code-block:: python
+
+   from typing import Any, Awaitable, Generator
+   import asyncio
+
+   class MyAwaitable(Awaitable[str]):
+       def __init__(self, tag: str, count: int) -> None:
+           self.tag = tag
+           self.count = count
+
+       def __await__(self) -> Generator[Any, None, str]:
+           for i in range(self.count, 0, -1):
+               print('T-minus {} ({})'.format(i, self.tag))
+               yield from asyncio.sleep(0.1)
+           return "Blastoff!"
+
+   def countdown_3(tag: str, count: int) -> Awaitable[str]:
+       return MyAwaitable(tag, count)
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_3("Heart of Gold", 5))
+   loop.close()
+
+To create an asynchronous iterator, subclass ``AsyncIterator``:
+
+.. code-block:: python
+
+   from typing import Optional, AsyncIterator
+   import asyncio
+
+   class arange(AsyncIterator[int]):
+       def __init__(self, start: int, stop: int, step: int) -> None:
+           self.start = start
+           self.stop = stop
+           self.step = step
+           self.count = start - step
+
+       def __aiter__(self) -> AsyncIterator[int]:
+           return self
+
+       async def __anext__(self) -> int:
+           self.count += self.step
+           if self.count == self.stop:
+               raise StopAsyncIteration
+           else:
+               return self.count
+
+   async def countdown_4(tag: str, n: int) -> str:
+       async for i in arange(n, 0, -1):
+           print('T-minus {} ({})'.format(i, tag))
+           await asyncio.sleep(0.1)
+       return "Blastoff!"
+
+   loop = asyncio.get_event_loop()
+   loop.run_until_complete(countdown_4("Serenity", 5))
+   loop.close()
+
+For a more concrete example, the mypy repo has a toy webcrawler that
+demonstrates how to work with coroutines. One version
+`uses async/await <https://github.com/python/mypy/blob/master/test-data/samples/crawl2.py>`_
+and one
+`uses yield from <https://github.com/python/mypy/blob/master/test-data/samples/crawl.py>`_.
diff --git a/docs/source/python2.rst b/docs/source/python2.rst
new file mode 100644
index 0000000..2cb8f32
--- /dev/null
+++ b/docs/source/python2.rst
@@ -0,0 +1,136 @@
+.. _python2:
+
+Type checking Python 2 code
+===========================
+
+For code that needs to be Python 2.7 compatible, function type
+annotations are given in comments, since the function annotation
+syntax was introduced in Python 3. The comment-based syntax is
+specified in `PEP 484 <https://www.python.org/dev/peps/pep-0484>`_.
+
+Run mypy in Python 2 mode by using the ``--py2`` option::
+
+    $ mypy --py2 program.py
+
+To run your program, you must have the ``typing`` module in your
+Python 2 module search path. Use ``pip install typing`` to install the
+module. This also works for Python 3 versions prior to 3.5 that don't
+include ``typing`` in the standard library.
+
+The example below illustrates the Python 2 function type annotation
+syntax. This syntax is also valid in Python 3 mode:
+
+.. code-block:: python
+
+    from typing import List
+
+    def hello(): # type: () -> None
+        print 'hello'
+
+    class Example:
+        def method(self, lst, opt=0, *args, **kwargs):
+            # type: (List[str], int, *str, **bool) -> int
+            """Docstring comes after type comment."""
+            ...
+
+It's worth going through these details carefully to avoid surprises:
+
+- You don't provide an annotation for the ``self`` / ``cls`` variable of
+  methods.
+
+- Docstring always comes *after* the type comment.
+
+- For ``*args`` and ``**kwargs`` the type should be prefixed with
+  ``*`` or ``**``, respectively (except when using the multi-line
+  annotation syntax described below). Again, the above example
+  illustrates this.
+
+- Things like ``Any`` must be imported from ``typing``, even if they
+  are only used in comments.
+
+- In Python 2 mode ``str`` is implicitly promoted to ``unicode``, similar
+  to how ``int`` is compatible with ``float``. This is unlike ``bytes`` and
+  ``str`` in Python 3, which are incompatible. ``bytes`` in Python 2 is
+  equivalent to ``str``. (This might change in the future.)
+
+.. _multi_line_annotation:
+
+Multi-line Python 2 function annotations
+----------------------------------------
+
+Mypy also supports a multi-line comment annotation syntax. You
+can provide a separate annotation for each argument using the variable
+annotation syntax. When using the single-line annotation syntax
+described above, functions with long argument lists tend to result in
+overly long type comments and it's often tricky to see which argument
+type corresponds to which argument. The alternative, multi-line
+annotation syntax makes long annotations easier to read and write.
+
+.. note::
+
+    Multi-line comment annotations currently only work when using the
+    ``--fast-parser`` command line option.  This is not enabled by
+    default because the option isn’t supported on Windows yet.
+
+Here is an example (from PEP 484):
+
+.. code-block:: python
+
+    def send_email(address,     # type: Union[str, List[str]]
+                   sender,      # type: str
+                   cc,          # type: Optional[List[str]]
+                   bcc,         # type: Optional[List[str]]
+                   subject='',
+                   body=None    # type: List[str]
+                   ):
+        # type: (...) -> bool
+        """Send an email message.  Return True if successful."""
+        <code>
+
+You write a separate annotation for each function argument on the same
+line as the argument. Each annotation must be on a separate line. If
+you leave out an annotation for an argument, it defaults to
+``Any``. You provide a return type annotation in the body of the
+function using the form ``# type: (...) -> rt``, where ``rt`` is the
+return type. Note that the return type annotation contains literal
+three dots.
+
+Note that when using multi-line comments, you do not need to prefix the
+types of your ``*arg`` and ``**kwarg`` parameters with ``*`` or ``**``.
+For example, here is how you would annotate the first example using
+multi-line comments.
+
+.. code-block:: python
+
+    from typing import List
+
+    class Example:
+        def method(self,
+                   lst,      # type: List[str]
+                   opt=0,    # type: int
+                   *args,    # type: str
+                   **kwargs  # type: bool
+                   ):
+            # type: (...) -> int
+            """Docstring comes after type comment."""
+            ...
+
+
+Additional notes
+----------------
+
+- You should include types for arguments with default values in the
+  annotation. The ``opt`` argument of ``method`` in the example at the
+  beginning of this section is an example of this.
+
+- The annotation can be on the same line as the function header or on
+  the following line.
+
+- The type syntax for variables is the same as for Python 3.
+
+- You don't need to use string literal escapes for forward references
+  within comments.
+
+- Mypy uses a separate set of library stub files in `typeshed
+  <https://github.com/python/typeshed>`_ for Python 2. Library support
+  may vary between Python 2 and Python 3.
diff --git a/docs/source/python36.rst b/docs/source/python36.rst
new file mode 100644
index 0000000..f676864
--- /dev/null
+++ b/docs/source/python36.rst
@@ -0,0 +1,100 @@
+.. _python-36:
+
+New features in Python 3.6
+==========================
+
+Python 3.6 will be `released
+<https://www.python.org/dev/peps/pep-0494>`_ in December 2016.  The
+`first beta <https://www.python.org/downloads/release/python-360b1/>`_
+came out in September and adds some exciting features.  Here's the
+support matrix for these in mypy (to be updated with each new mypy
+release).  The intention is to support all of these by the time Python
+3.6 is released.
+
+.. note::
+
+   Mypy only understands Python 3.6 syntax if you use the ``--fast-parser`` flag.
+   This requires that the `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package is
+   installed and has at least version 0.6.1.  Use ``pip3 install -U typed_ast``.
+   If running mypy on an earlier Python version, you also need to enable 3.6 support
+   through ``--python-version 3.6``.
+
+   Example command line (or use :ref:`config-file`):
+
+     .. code-block:: text
+
+        $ pip3 install -U typed_ast
+        $ mypy --fast-parser --python-version 3.6 program.py
+
+Syntax for variable annotations (`PEP 526 <https://www.python.org/dev/peps/pep-0526>`_)
+---------------------------------------------------------------------------------------
+
+Python 3.6 feature: variables (in global, class or local scope) can
+now have type annotations using either of the two forms:
+
+.. code-block:: python
+
+   foo: Optional[int]
+   bar: List[str] = []
+
+Mypy fully supports this syntax, interpreting them as equivalent to
+
+.. code-block:: python
+
+   foo = None  # type: Optional[int]
+   bar = []  # type: List[str]
+
+.. note::
+
+   See above for how to enable Python 3.6 syntax.
+
+Literal string formatting (`PEP 498 <https://www.python.org/dev/peps/pep-0498>`_)
+---------------------------------------------------------------------------------
+
+Python 3.6 feature: string literals of the form
+``f"text {expression} text"`` evaluate ``expression`` using the
+current evaluation context (locals and globals).
+
+Mypy does not yet support this.
+
+Underscores in numeric literals (`PEP 515 <https://www.python.org/dev/peps/pep-0515>`_)
+---------------------------------------------------------------------------------------
+
+Python 3.6 feature: numeric literals can contain underscores,
+e.g. ``1_000_000``.
+
+Mypy fully supports this syntax:
+
+.. code-block:: python
+
+   precise_val = 1_000_000.000_000_1
+   hexes: List[int] = []
+   hexes.append(0x_FF_FF_FF_FF)
+
+.. note::
+
+   This requires the ``--fast-parser`` flag and it requires that the
+   `typed_ast <https://pypi.python.org/pypi/typed-ast>`_ package is
+   installed and has at least version 0.6.2.  Use ``pip3 install -U typed_ast``.
+
+Asynchronous generators (`PEP 525 <https://www.python.org/dev/peps/pep-0525>`_)
+-------------------------------------------------------------------------------
+
+Python 3.6 feature: coroutines defined with ``async def`` (PEP 492)
+can now also be generators, i.e. contain ``yield`` expressions.
+
+Mypy does not yet support this.
+
+Asynchronous comprehensions (`PEP 530 <https://www.python.org/dev/peps/pep-0530>`_)
+-----------------------------------------------------------------------------------
+
+Python 3.6 feature: coroutines defined with ``async def`` (PEP 492)
+can now also contain list, set and dict comprehensions that use
+``async for`` syntax.
+
+Mypy does not yet support this.
+
+New named tuple syntax
+----------------------
+
+Python 3.6 supports an alternative syntax for named tuples. See :ref:`named-tuples`.
diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst
new file mode 100644
index 0000000..2537c51
--- /dev/null
+++ b/docs/source/revision_history.rst
@@ -0,0 +1,176 @@
+Revision history
+================
+
+List of major changes to this document:
+
+- January 2017
+    * Publish ``mypy`` version 0.470 on PyPI.
+
+    * Change package name from ``mypy-lang`` to ``mypy``.
+
+    * Add :ref:`integrating-mypy`.
+
+    * Add :ref:`cheat-sheet-py3`.
+
+    * Major update to :ref:`finding-imports`.
+
+    * Add :ref:`--ignore-missing-imports <ignore-missing-imports>`.
+
+    * Updates to :ref:`config-file`.
+
+    * Document underscore support in numeric literals.
+
+    * Document that arguments prefixed with ``__`` are positional-only.
+
+    * Document that ``--hide-error-context`` is now on by default,
+      and there is a new flag ``--show-error-context``.
+
+    * Add ``ignore_errors`` to :ref:`per-module-flags`.
+
+- November 2016
+    * Publish ``mypy-lang`` version 0.4.6 on PyPI.
+
+    * Add :ref:`getting-started`.
+
+    * Add :ref:`generic-methods-and-generic-self` (experimental).
+
+    * Add :ref:`declaring-decorators`.
+
+    * Discuss generic type aliases in :ref:`type-aliases`.
+
+    * Discuss Python 3.6 named tuple syntax in :ref:`named-tuples`.
+
+    * Updates to :ref:`common_issues`.
+
+    * Updates to :ref:`python-36`.
+
+    * Updates to :ref:`command-line`:
+
+      * ``--custom-typeshed-dir``
+
+      * ``--junit-xml``
+
+      * ``--find-occurrences``
+
+      * ``--cobertura-xml-report``
+
+      * ``--warn-no-return``
+
+    * Updates to :ref:`config-file`:
+
+      * Sections with fnmatch patterns now use
+        module name patterns (previously they were path patterns).
+      * Added ``custom_typeshed_dir``, ``mypy_path`` and ``show_column_numbers``.
+
+    * Mention the magic ``MYPY`` constant in :ref:`import-cycles`.
+
+- October 2016
+    * Publish ``mypy-lang`` version 0.4.5 on PyPI.
+
+    * Add :ref:`python-36`.
+
+    * Add :ref:`config-file`.
+
+    * Updates to :ref:`command-line`: ``--strict-optional-white-list``,
+      ``--disallow-subclassing-any``, ``--config-file``, ``@flagfile``,
+      ``--hide-error-context`` (replaces ``--suppress-error-context``),
+      ``--show-column-numbers`` and ``--scripts-are-modules``.
+
+    * Mention ``typing.TYPE_CHECKING`` in :ref:`import-cycles`.
+
+- August 2016
+    * Publish ``mypy-lang`` version 0.4.4 on PyPI.
+
+    * Add :ref:`newtypes`.
+
+    * Add :ref:`async-and-await`.
+
+    * Add :ref:`text-and-anystr`.
+
+    * Add :ref:`version_and_platform_checks`.
+
+- July 2016
+    * Publish ``mypy-lang`` version 0.4.3 on PyPI.
+
+    * Add :ref:`strict_optional`.
+
+    * Add :ref:`multi_line_annotation`.
+
+- June 2016
+    * Publish ``mypy-lang`` version 0.4.2 on PyPI.
+
+    * Add :ref:`type-of-class`.
+
+    * Add :ref:`cheat-sheet-py2`.
+
+    * Add :ref:`reveal-type`.
+
+- May 2016
+    * Publish ``mypy-lang`` version 0.4 on PyPI.
+
+    * Add :ref:`type-variable-upper-bound`.
+
+    * Document :ref:`command-line`.
+
+- Feb 2016
+    * Publish ``mypy-lang`` version 0.3.1 on PyPI.
+
+    * Document Python 2 support.
+
+- Nov 2015
+    Add :ref:`library-stubs`.
+
+- Jun 2015
+    Remove ``Undefined`` and ``Dynamic``, as they are not in PEP 484.
+
+- Apr 2015
+    Publish ``mypy-lang`` version 0.2.0 on PyPI.
+
+- Mar 2015
+    Update documentation to reflect PEP 484:
+
+    * Add :ref:`named-tuples` and :ref:`optional`.
+
+    * Do not mention type application syntax (for
+      example, ``List[int]()``), as it's no longer supported,
+      due to PEP 484 compatibility.
+
+    * Rename ``typevar`` to ``TypeVar``.
+
+    * Document ``# type: ignore`` which allows
+      locally ignoring spurious errors (:ref:`silencing_checker`).
+
+    * No longer mention
+      ``Any(x)`` as a valid cast, as it will be phased out soon.
+
+    * Mention the new ``.pyi`` stub file extension. Stubs can live
+      in the same directory as the rest of the program.
+
+- Jan 2015
+    Mypy moves closer to PEP 484:
+
+    * Add :ref:`type-aliases`.
+
+    * Update discussion of overloading -- it's now only supported in stubs.
+
+    * Rename ``Function[...]`` to ``Callable[...]``.
+
+- Dec 2014
+    Publish mypy version 0.1.0 on PyPI.
+
+- Oct 2014
+    Major restructuring.
+    Split the HTML documentation into
+    multiple pages.
+
+- Sep 2014
+    Migrated docs to Sphinx.
+
+- Aug 2014
+    Don't discuss native semantics. There is only Python
+    semantics.
+
+- Jul 2013
+    Rewrite to use new syntax. Shift focus to discussing
+    Python semantics. Add more content, including short discussions of
+    :ref:`generic-functions` and :ref:`union-types`.
diff --git a/docs/source/supported_python_features.rst b/docs/source/supported_python_features.rst
new file mode 100644
index 0000000..ca68d4c
--- /dev/null
+++ b/docs/source/supported_python_features.rst
@@ -0,0 +1,20 @@
+Supported Python features and modules
+=====================================
+
+A list of unsupported Python features is maintained in the mypy wiki:
+
+- `Unsupported Python features <https://github.com/python/mypy/wiki/Unsupported-Python-Features>`_
+
+Runtime definition of methods and functions
+*******************************************
+
+By default, mypy will complain if you add a function to a class
+or module outside its definition -- but only if this is visible to the
+type checker. This only affects static checking, as mypy performs no
+additional type checking at runtime. You can easily work around
+this. For example, you can use dynamically typed code or values with
+``Any`` types, or you can use ``setattr`` or other introspection
+features. However, you need to be careful if you decide to do this. If
+used indiscriminately, you may have difficulty using static typing
+effectively, since the type checker cannot see functions defined at
+runtime.
diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst
new file mode 100644
index 0000000..76e9cf9
--- /dev/null
+++ b/docs/source/type_inference_and_annotations.rst
@@ -0,0 +1,172 @@
+Type inference and type annotations
+===================================
+
+Type inference
+**************
+
+The initial assignment defines a variable. If you do not explicitly
+specify the type of the variable, mypy infers the type based on the
+static type of the value expression:
+
+.. code-block:: python
+
+   i = 1           # Infer type int for i
+   l = [1, 2]      # Infer type List[int] for l
+
+Type inference is bidirectional and takes context into account. For
+example, the following is valid:
+
+.. code-block:: python
+
+   def f(l: List[object]) -> None:
+       l = [1, 2]  # Infer type List[object] for [1, 2]
+
+In an assignment, the type context is determined by the assignment
+target. In this case this is ``l``, which has the type
+``List[object]``. The value expression ``[1, 2]`` is type checked in
+this context and given the type ``List[object]``. In the previous
+example we introduced a new variable ``l``, and here the type context
+was empty.
+
+Note that the following is not valid, since ``List[int]`` is not
+compatible with ``List[object]``:
+
+.. code-block:: python
+
+   def f(l: List[object], k: List[int]) -> None:
+       l = k       # Type check error: incompatible types in assignment
+
+The reason why the above assignment is disallowed is that allowing the
+assignment could result in non-int values stored in a list of ``int``:
+
+.. code-block:: python
+
+   def f(l: List[object], k: List[int]) -> None:
+       l = k
+       l.append('x')
+       print(k[-1])  # Ouch; a string in List[int]
+
+You can still run the above program; it prints ``x``. This illustrates
+the fact that static types are used during type checking, but they do
+not affect the runtime behavior of programs. You can run programs with
+type check failures, which is often very handy when performing a large
+refactoring. Thus you can always 'work around' the type system, and it
+doesn't really limit what you can do in your program.
+
+Type inference is not used in dynamically typed functions (those
+without an explicit return type) — every local variable type defaults
+to ``Any``, which is discussed later.
+
+Explicit types for variables
+****************************
+
+You can override the inferred type of a variable by using a
+special type comment after an assignment statement:
+
+.. code-block:: python
+
+   x = 1  # type: Union[int, str]
+
+Without the type comment, the type of ``x`` would be just ``int``. We
+use an annotation to give it a more general type ``Union[int, str]``.
+Mypy checks that the type of the initializer is compatible with the
+declared type. The following example is not valid, since the initializer is
+a floating point number, and this is incompatible with the declared
+type:
+
+.. code-block:: python
+
+   x = 1.1  # type: Union[int, str]  # Error!
+
+.. note::
+
+   The best way to think about this is that the type comment sets the
+   type of the variable, not the type of the expression. To force the
+   type of an expression you can use ``cast(<type>, <expression>)``.
+
+Explicit types for collections
+******************************
+
+The type checker cannot always infer the type of a list or a
+dictionary. This often arises when creating an empty list or
+dictionary and assigning it to a new variable that doesn't have an explicit
+variable type. In these cases you can give the type explicitly using
+a type annotation comment:
+
+.. code-block:: python
+
+   l = []  # type: List[int]       # Create empty list with type List[int]
+   d = {}  # type: Dict[str, int]  # Create empty dictionary (str -> int)
+
+Similarly, you can also give an explicit type when creating an empty set:
+
+.. code-block:: python
+
+   s = set()  # type: Set[int]
+
+Declaring multiple variable types at a time
+*******************************************
+
+You can declare more than a single variable at a time. In order to
+nicely work with multiple assignment, you must give each variable a
+type separately:
+
+.. code-block:: python
+
+   i, found = 0, False # type: int, bool
+
+You can optionally use parentheses around the types, assignment targets
+and assigned expression:
+
+.. code-block:: python
+
+   i, found = 0, False # type: (int, bool)      # OK
+   (i, found) = 0, False # type: int, bool      # OK
+   i, found = (0, False) # type: int, bool      # OK
+   (i, found) = (0, False) # type: (int, bool)  # OK
+
+Starred expressions
+*******************
+
+In most cases, mypy can infer the type of starred expressions from the
+right-hand side of an assignment, but not always:
+
+.. code-block:: python
+
+    a, *bs = 1, 2, 3   # OK
+    p, q, *rs = 1, 2   # Error: Type of rs cannot be inferred
+
+On first line, the type of ``bs`` is inferred to be
+``List[int]``. However, on the second line, mypy cannot infer the type
+of ``rs``, because there is no right-hand side value for ``rs`` to
+infer the type from. In cases like these, the starred expression needs
+to be annotated with a starred type:
+
+.. code-block:: python
+
+    p, q, *rs = 1, 2  # type: int, int, *List[int]
+
+Here, the type of ``rs`` is set to ``List[int]``.
+
+Types in stub files
+*******************
+
+:ref:`Stub files <library-stubs>` are written in normal Python 3
+syntax, but generally leaving out runtime logic like variable
+initializers, function bodies, and default arguments, replacing them
+with ellipses.
+
+In this example, each ellipsis ``...`` is literally written in the
+stub file as three dots:
+
+.. code-block:: python
+
+    x = ...  # type: int
+    def afunc(code: str) -> int: ...
+    def bfunc(a: int, b: int=...) -> int: ...
+
+.. note::
+
+    The ellipsis ``...`` is also used with a different meaning in
+    :ref:`callable types <callable-types>` and :ref:`tuple types
+    <tuple-types>`.
diff --git a/extensions/README.md b/extensions/README.md
new file mode 100644
index 0000000..73b786b
--- /dev/null
+++ b/extensions/README.md
@@ -0,0 +1,6 @@
+Mypy Extensions
+===============
+
+The "mypy_extensions" module defines experimental extensions to the
+standard "typing" module that are supported by the mypy typechecker.
+
diff --git a/extensions/mypy_extensions.py b/extensions/mypy_extensions.py
new file mode 100644
index 0000000..26e568c
--- /dev/null
+++ b/extensions/mypy_extensions.py
@@ -0,0 +1,97 @@
+"""Defines experimental extensions to the standard "typing" module that are
+supported by the mypy typechecker.
+
+Example usage:
+    from mypy_extensions import TypedDict
+"""
+
+# NOTE: This module must support Python 2.7 in addition to Python 3.x
+
+import sys
+# _type_check is NOT a part of public typing API, it is used here only to mimic
+# the (convenient) behavior of types provided by typing module.
+from typing import _type_check  # type: ignore
+
+
+def _check_fails(cls, other):
+    try:
+        if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']:
+            # Typed dicts are only for static structural subtyping.
+            raise TypeError('TypedDict does not support instance and class checks')
+    except (AttributeError, ValueError):
+        pass
+    return False
+
+
+def _dict_new(cls, *args, **kwargs):
+    return dict(*args, **kwargs)
+
+
+def _typeddict_new(cls, _typename, _fields=None, **kwargs):
+    if _fields is None:
+        _fields = kwargs
+    elif kwargs:
+        raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                        " but not both")
+    return _TypedDictMeta(_typename, (), {'__annotations__': dict(_fields)})
+
+
+class _TypedDictMeta(type):
+    def __new__(cls, name, bases, ns):
+        # Create new typed dict class object.
+        # This method is called directly when TypedDict is subclassed,
+        # or via _typeddict_new when TypedDict is instantiated. This way
+        # TypedDict supports all three syntaxes described in its docstring.
+        # Subclasses and instances of TypedDict return actual dictionaries
+        # via _dict_new.
+        ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
+        tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
+        try:
+            # Setting correct module is necessary to make typed dict classes pickleable.
+            tp_dict.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
+        except (AttributeError, ValueError):
+            pass
+        anns = ns.get('__annotations__', {})
+        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+        anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
+        for base in bases:
+            anns.update(base.__dict__.get('__annotations__', {}))
+        tp_dict.__annotations__ = anns
+        return tp_dict
+
+    __instancecheck__ = __subclasscheck__ = _check_fails
+
+
+TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
+TypedDict.__module__ = __name__
+TypedDict.__doc__ = \
+    """A simple typed name space. At runtime it is equivalent to a plain dict.
+
+    TypedDict creates a dictionary type that expects all of its
+    instances to have a certain set of keys, with each key
+    associated with a value of a consistent type. This expectation
+    is not checked at runtime but is only enforced by typecheckers.
+    Usage::
+
+        Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+        a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+        b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+        assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+    The type info could be accessed via Point2D.__annotations__. TypedDict
+    supports two additional equivalent forms::
+
+        Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+
+        class Point2D(TypedDict):
+            x: int
+            y: int
+            label: str
+
+    The latter syntax is only supported in Python 3.6+, while the other
+    two syntax forms work for Python 2.7 and 3.2+.
+    """
+
+
+# Return type that indicates a function does not return
+class NoReturn: pass
diff --git a/extensions/setup.py b/extensions/setup.py
new file mode 100644
index 0000000..3490840
--- /dev/null
+++ b/extensions/setup.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+# NOTE: This package must support Python 2.7 in addition to Python 3.x
+
+from distutils.core import setup
+
+version = '0.2.0-dev'
+description = 'Experimental type system extensions for programs checked with the mypy typechecker.'
+long_description = '''
+Mypy Extensions
+===============
+
+The "mypy_extensions" module defines experimental extensions to the
+standard "typing" module that are supported by the mypy typechecker.
+'''.lstrip()
+
+classifiers = [
+    'Development Status :: 2 - Pre-Alpha',
+    'Environment :: Console',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: POSIX',
+    'Programming Language :: Python :: 2',
+    'Programming Language :: Python :: 2.7',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.3',
+    'Programming Language :: Python :: 3.4',
+    'Programming Language :: Python :: 3.5',
+    'Topic :: Software Development',
+]
+
+setup(
+    name='mypy_extensions',
+    version=version,
+    description=description,
+    long_description=long_description,
+    author='David Foster',
+    author_email='david at dafoster.net',
+    url='http://www.mypy-lang.org/',
+    license='MIT License',
+    platforms=['POSIX'],
+    py_modules=['mypy_extensions'],
+    classifiers=classifiers,
+)
diff --git a/lib-typing/2.7/setup.py b/lib-typing/2.7/setup.py
new file mode 100644
index 0000000..18c34d8
--- /dev/null
+++ b/lib-typing/2.7/setup.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+"""setup.py for Python 2.x typing module"""
+
+import glob
+import os
+import os.path
+import sys
+
+from distutils.core import setup
+
+if sys.version_info >= (3, 0, 0):
+    sys.stderr.write("ERROR: You need Python 2.x to install this module.\n")
+    exit(1)
+
+version = '0.0.1.dev1'
+description = 'typing (Python 2.x)'
+long_description = '''
+typing (Python 2.x)
+===================
+
+This module is part of mypy, a static type checker for Python.
+'''.lstrip()
+
+classifiers = [
+    'Development Status :: 2 - Pre-Alpha',
+    'Environment :: Console',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: MIT License',
+    'Operating System :: POSIX',
+    'Programming Language :: Python :: 2.7',
+    'Topic :: Software Development',
+]
+
+setup(name='typing',
+      version=version,
+      description=description,
+      long_description=long_description,
+      author='Jukka Lehtosalo',
+      author_email='jukka.lehtosalo at iki.fi',
+      url='http://www.mypy-lang.org/',
+      license='MIT License',
+      platforms=['POSIX'],
+      py_modules=['typing'],
+      classifiers=classifiers,
+      )
diff --git a/lib-typing/2.7/test_typing.py b/lib-typing/2.7/test_typing.py
new file mode 100644
index 0000000..39eb7c1
--- /dev/null
+++ b/lib-typing/2.7/test_typing.py
@@ -0,0 +1,1629 @@
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import pickle
+import re
+import sys
+from unittest import TestCase, main, SkipTest
+from copy import copy, deepcopy
+
+from typing import Any
+from typing import TypeVar, AnyStr
+from typing import T, KT, VT  # Not in __all__.
+from typing import Union, Optional
+from typing import Tuple, List, MutableMapping
+from typing import Callable
+from typing import Generic, ClassVar
+from typing import cast
+from typing import Type
+from typing import NewType
+from typing import NamedTuple
+from typing import IO, TextIO, BinaryIO
+from typing import Pattern, Match
+import abc
+import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
+
+
+class BaseTestCase(TestCase):
+
+    def assertIsSubclass(self, cls, class_or_tuple, msg=None):
+        if not issubclass(cls, class_or_tuple):
+            message = '%r is not a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+    def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
+        if issubclass(cls, class_or_tuple):
+            message = '%r is a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+    def clear_caches(self):
+        for f in typing._cleanups:
+            f()
+
+
+class Employee(object):
+    pass
+
+
+class Manager(Employee):
+    pass
+
+
+class Founder(Employee):
+    pass
+
+
+class ManagingFounder(Manager, Founder):
+    pass
+
+
+class AnyTests(BaseTestCase):
+
+    def test_any_instance_type_error(self):
+        with self.assertRaises(TypeError):
+            isinstance(42, Any)
+
+    def test_any_subclass_type_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(Employee, Any)
+        with self.assertRaises(TypeError):
+            issubclass(Any, Employee)
+
+    def test_repr(self):
+        self.assertEqual(repr(Any), 'typing.Any')
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            issubclass(42, Any)
+        with self.assertRaises(TypeError):
+            Any[int]  # Any is not a generic type.
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class A(Any):
+                pass
+        with self.assertRaises(TypeError):
+            class A(type(Any)):
+                pass
+
+    def test_cannot_instantiate(self):
+        with self.assertRaises(TypeError):
+            Any()
+        with self.assertRaises(TypeError):
+            type(Any)()
+
+    def test_cannot_subscript(self):
+        with self.assertRaises(TypeError):
+            Any[int]
+
+    def test_any_is_subclass(self):
+        # These expressions must simply not fail.
+        typing.Match[Any]
+        typing.Pattern[Any]
+        typing.IO[Any]
+
+
+class TypeVarTests(BaseTestCase):
+
+    def test_basic_plain(self):
+        T = TypeVar('T')
+        # T equals itself.
+        self.assertEqual(T, T)
+        # T is an instance of TypeVar
+        self.assertIsInstance(T, TypeVar)
+
+    def test_typevar_instance_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            isinstance(42, T)
+
+    def test_typevar_subclass_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            issubclass(int, T)
+        with self.assertRaises(TypeError):
+            issubclass(T, int)
+
+    def test_constrained_error(self):
+        with self.assertRaises(TypeError):
+            X = TypeVar('X', int)
+            X
+
+    def test_union_unique(self):
+        X = TypeVar('X')
+        Y = TypeVar('Y')
+        self.assertNotEqual(X, Y)
+        self.assertEqual(Union[X], X)
+        self.assertNotEqual(Union[X], Union[X, Y])
+        self.assertEqual(Union[X, X], X)
+        self.assertNotEqual(Union[X, int], Union[X])
+        self.assertNotEqual(Union[X, int], Union[int])
+        self.assertEqual(Union[X, int].__args__, (X, int))
+        self.assertEqual(Union[X, int].__parameters__, (X,))
+        self.assertIs(Union[X, int].__origin__, Union)
+
+    def test_union_constrained(self):
+        A = TypeVar('A', str, bytes)
+        self.assertNotEqual(Union[A, str], Union[A])
+
+    def test_repr(self):
+        self.assertEqual(repr(T), '~T')
+        self.assertEqual(repr(KT), '~KT')
+        self.assertEqual(repr(VT), '~VT')
+        self.assertEqual(repr(AnyStr), '~AnyStr')
+        T_co = TypeVar('T_co', covariant=True)
+        self.assertEqual(repr(T_co), '+T_co')
+        T_contra = TypeVar('T_contra', contravariant=True)
+        self.assertEqual(repr(T_contra), '-T_contra')
+
+    def test_no_redefinition(self):
+        self.assertNotEqual(TypeVar('T'), TypeVar('T'))
+        self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
+
+    def test_cannot_subclass_vars(self):
+        with self.assertRaises(TypeError):
+            class V(TypeVar('T')):
+                pass
+
+    def test_cannot_subclass_var_itself(self):
+        with self.assertRaises(TypeError):
+            class V(TypeVar):
+                pass
+
+    def test_cannot_instantiate_vars(self):
+        with self.assertRaises(TypeError):
+            TypeVar('A')()
+
+    def test_bound_errors(self):
+        with self.assertRaises(TypeError):
+            TypeVar('X', bound=42)
+        with self.assertRaises(TypeError):
+            TypeVar('X', str, float, bound=Employee)
+
+
+class UnionTests(BaseTestCase):
+
+    def test_basics(self):
+        u = Union[int, float]
+        self.assertNotEqual(u, Union)
+
+    def test_subclass_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(int, Union)
+        with self.assertRaises(TypeError):
+            issubclass(Union, int)
+        with self.assertRaises(TypeError):
+            issubclass(int, Union[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(Union[int, str], int)
+
+    def test_union_any(self):
+        u = Union[Any]
+        self.assertEqual(u, Any)
+        u1 = Union[int, Any]
+        u2 = Union[Any, int]
+        u3 = Union[Any, object]
+        self.assertEqual(u1, u2)
+        self.assertNotEqual(u1, Any)
+        self.assertNotEqual(u2, Any)
+        self.assertNotEqual(u3, Any)
+
+    def test_union_object(self):
+        u = Union[object]
+        self.assertEqual(u, object)
+        u = Union[int, object]
+        self.assertEqual(u, object)
+        u = Union[object, int]
+        self.assertEqual(u, object)
+
+    def test_unordered(self):
+        u1 = Union[int, float]
+        u2 = Union[float, int]
+        self.assertEqual(u1, u2)
+
+    def test_single_class_disappears(self):
+        t = Union[Employee]
+        self.assertIs(t, Employee)
+
+    def test_base_class_disappears(self):
+        u = Union[Employee, Manager, int]
+        self.assertEqual(u, Union[int, Employee])
+        u = Union[Manager, int, Employee]
+        self.assertEqual(u, Union[int, Employee])
+        u = Union[Employee, Manager]
+        self.assertIs(u, Employee)
+
+    def test_union_union(self):
+        u = Union[int, float]
+        v = Union[u, Employee]
+        self.assertEqual(v, Union[int, float, Employee])
+
+    def test_repr(self):
+        self.assertEqual(repr(Union), 'typing.Union')
+        u = Union[Employee, int]
+        self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__)
+        u = Union[int, Employee]
+        self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__)
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class C(Union):
+                pass
+        with self.assertRaises(TypeError):
+            class C(type(Union)):
+                pass
+        with self.assertRaises(TypeError):
+            class C(Union[int, str]):
+                pass
+
+    def test_cannot_instantiate(self):
+        with self.assertRaises(TypeError):
+            Union()
+        u = Union[int, float]
+        with self.assertRaises(TypeError):
+            u()
+        with self.assertRaises(TypeError):
+            type(u)()
+
+    def test_union_generalization(self):
+        self.assertFalse(Union[str, typing.Iterable[int]] == str)
+        self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
+        self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
+
+    def test_optional(self):
+        o = Optional[int]
+        u = Union[int, None]
+        self.assertEqual(o, u)
+
+    def test_empty(self):
+        with self.assertRaises(TypeError):
+            Union[()]
+
+    def test_union_instance_type_error(self):
+        with self.assertRaises(TypeError):
+            isinstance(42, Union[int, str])
+
+    def test_union_str_pattern(self):
+        # Shouldn't crash; see http://bugs.python.org/issue25390
+        A = Union[str, Pattern]
+        A
+
+    def test_etree(self):
+        # See https://github.com/python/typing/issues/229
+        # (Only relevant for Python 2.)
+        try:
+            from xml.etree.cElementTree import Element
+        except ImportError:
+            raise SkipTest("cElementTree not found")
+        Union[Element, str]  # Shouldn't crash
+
+        def Elem(*args):
+            return Element(*args)
+
+        Union[Elem, str]  # Nor should this
+
+
+class TupleTests(BaseTestCase):
+
+    def test_basics(self):
+        with self.assertRaises(TypeError):
+            issubclass(Tuple, Tuple[int, str])
+        with self.assertRaises(TypeError):
+            issubclass(tuple, Tuple[int, str])
+
+        class TP(tuple): pass
+        self.assertTrue(issubclass(tuple, Tuple))
+        self.assertTrue(issubclass(TP, Tuple))
+
+    def test_equality(self):
+        self.assertEqual(Tuple[int], Tuple[int])
+        self.assertEqual(Tuple[int, ...], Tuple[int, ...])
+        self.assertNotEqual(Tuple[int], Tuple[int, int])
+        self.assertNotEqual(Tuple[int], Tuple[int, ...])
+
+    def test_tuple_subclass(self):
+        class MyTuple(tuple):
+            pass
+        self.assertTrue(issubclass(MyTuple, Tuple))
+
+    def test_tuple_instance_type_error(self):
+        with self.assertRaises(TypeError):
+            isinstance((0, 0), Tuple[int, int])
+        isinstance((0, 0), Tuple)
+
+    def test_repr(self):
+        self.assertEqual(repr(Tuple), 'typing.Tuple')
+        self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]')
+        self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]')
+        self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]')
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            issubclass(42, Tuple)
+        with self.assertRaises(TypeError):
+            issubclass(42, Tuple[int])
+
+
+class CallableTests(BaseTestCase):
+
+    def test_self_subclass(self):
+        with self.assertRaises(TypeError):
+            self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
+        self.assertTrue(issubclass(type(lambda x: x), Callable))
+
+    def test_eq_hash(self):
+        self.assertEqual(Callable[[int], int], Callable[[int], int])
+        self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1)
+        self.assertNotEqual(Callable[[int], int], Callable[[int], str])
+        self.assertNotEqual(Callable[[int], int], Callable[[str], int])
+        self.assertNotEqual(Callable[[int], int], Callable[[int, int], int])
+        self.assertNotEqual(Callable[[int], int], Callable[[], int])
+        self.assertNotEqual(Callable[[int], int], Callable)
+
+    def test_cannot_instantiate(self):
+        with self.assertRaises(TypeError):
+            Callable()
+        with self.assertRaises(TypeError):
+            type(Callable)()
+        c = Callable[[int], str]
+        with self.assertRaises(TypeError):
+            c()
+        with self.assertRaises(TypeError):
+            type(c)()
+
+    def test_callable_wrong_forms(self):
+        with self.assertRaises(TypeError):
+            Callable[(), int]
+        with self.assertRaises(TypeError):
+            Callable[[()], int]
+        with self.assertRaises(TypeError):
+            Callable[[int, 1], 2]
+
+    def test_callable_instance_works(self):
+        def f():
+            pass
+        self.assertIsInstance(f, Callable)
+        self.assertNotIsInstance(None, Callable)
+
+    def test_callable_instance_type_error(self):
+        def f():
+            pass
+        with self.assertRaises(TypeError):
+            self.assertIsInstance(f, Callable[[], None])
+        with self.assertRaises(TypeError):
+            self.assertIsInstance(f, Callable[[], Any])
+        with self.assertRaises(TypeError):
+            self.assertNotIsInstance(None, Callable[[], None])
+        with self.assertRaises(TypeError):
+            self.assertNotIsInstance(None, Callable[[], Any])
+
+    def test_repr(self):
+        ct0 = Callable[[], bool]
+        self.assertEqual(repr(ct0), 'typing.Callable[[], bool]')
+        ct2 = Callable[[str, float], int]
+        self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]')
+        ctv = Callable[..., str]
+        self.assertEqual(repr(ctv), 'typing.Callable[..., str]')
+
+    def test_ellipsis_in_generic(self):
+        # Shouldn't crash; see https://github.com/python/typing/issues/259
+        typing.List[Callable[..., str]]
+
+
+XK = TypeVar('XK', unicode, bytes)
+XV = TypeVar('XV')
+
+
+class SimpleMapping(Generic[XK, XV]):
+
+    def __getitem__(self, key):
+        pass
+
+    def __setitem__(self, key, value):
+        pass
+
+    def get(self, key, default=None):
+        pass
+
+
+class MySimpleMapping(SimpleMapping[XK, XV]):
+
+    def __init__(self):
+        self.store = {}
+
+    def __getitem__(self, key):
+        return self.store[key]
+
+    def __setitem__(self, key, value):
+        self.store[key] = value
+
+    def get(self, key, default=None):
+        try:
+            return self.store[key]
+        except KeyError:
+            return default
+
+
+class ProtocolTests(BaseTestCase):
+
+    def test_supports_int(self):
+        self.assertIsSubclass(int, typing.SupportsInt)
+        self.assertNotIsSubclass(str, typing.SupportsInt)
+
+    def test_supports_float(self):
+        self.assertIsSubclass(float, typing.SupportsFloat)
+        self.assertNotIsSubclass(str, typing.SupportsFloat)
+
+    def test_supports_complex(self):
+
+        # Note: complex itself doesn't have __complex__.
+        class C(object):
+            def __complex__(self):
+                return 0j
+
+        self.assertIsSubclass(C, typing.SupportsComplex)
+        self.assertNotIsSubclass(str, typing.SupportsComplex)
+
+    def test_supports_abs(self):
+        self.assertIsSubclass(float, typing.SupportsAbs)
+        self.assertIsSubclass(int, typing.SupportsAbs)
+        self.assertNotIsSubclass(str, typing.SupportsAbs)
+
+    def test_reversible(self):
+        self.assertIsSubclass(list, typing.Reversible)
+        self.assertNotIsSubclass(int, typing.Reversible)
+
+    def test_protocol_instance_type_error(self):
+        with self.assertRaises(TypeError):
+            isinstance(0, typing.SupportsAbs)
+        class C1(typing.SupportsInt):
+            def __int__(self):
+                return 42
+        class C2(C1):
+            pass
+        c = C2()
+        self.assertIsInstance(c, C1)
+
+class GenericTests(BaseTestCase):
+
+    def test_basics(self):
+        X = SimpleMapping[str, Any]
+        self.assertEqual(X.__parameters__, ())
+        with self.assertRaises(TypeError):
+            X[unicode]
+        with self.assertRaises(TypeError):
+            X[unicode, unicode]
+        Y = SimpleMapping[XK, unicode]
+        self.assertEqual(Y.__parameters__, (XK,))
+        Y[unicode]
+        with self.assertRaises(TypeError):
+            Y[unicode, unicode]
+
+    def test_generic_errors(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            Generic[T]()
+        with self.assertRaises(TypeError):
+            isinstance([], List[int])
+        with self.assertRaises(TypeError):
+            issubclass(list, List[int])
+
+    def test_init(self):
+        T = TypeVar('T')
+        S = TypeVar('S')
+        with self.assertRaises(TypeError):
+            Generic[T, T]
+        with self.assertRaises(TypeError):
+            Generic[T, S, T]
+
+    def test_repr(self):
+        self.assertEqual(repr(SimpleMapping),
+                         __name__ + '.' + 'SimpleMapping')
+        self.assertEqual(repr(MySimpleMapping),
+                         __name__ + '.' + 'MySimpleMapping')
+
+    def test_chain_repr(self):
+        T = TypeVar('T')
+        S = TypeVar('S')
+
+        class C(Generic[T]):
+            pass
+
+        X = C[Tuple[S, T]]
+        self.assertEqual(X, C[Tuple[S, T]])
+        self.assertNotEqual(X, C[Tuple[T, S]])
+
+        Y = X[T, int]
+        self.assertEqual(Y, X[T, int])
+        self.assertNotEqual(Y, X[S, int])
+        self.assertNotEqual(Y, X[T, str])
+
+        Z = Y[str]
+        self.assertEqual(Z, Y[str])
+        self.assertNotEqual(Z, Y[int])
+        self.assertNotEqual(Z, Y[T])
+
+        self.assertTrue(str(Z).endswith(
+            '.C[typing.Tuple[str, int]]'))
+
+    def test_new_repr(self):
+        T = TypeVar('T')
+        U = TypeVar('U', covariant=True)
+        S = TypeVar('S')
+
+        self.assertEqual(repr(List), 'typing.List')
+        self.assertEqual(repr(List[T]), 'typing.List[~T]')
+        self.assertEqual(repr(List[U]), 'typing.List[+U]')
+        self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
+        self.assertEqual(repr(List[int]), 'typing.List[int]')
+
+    def test_new_repr_complex(self):
+        T = TypeVar('T')
+        TS = TypeVar('TS')
+
+        self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
+        self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
+                         'typing.List[typing.Tuple[int, ~T]]')
+        self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
+                 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')
+
+    def test_new_repr_bare(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
+        self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
+        class C(typing.Dict[Any, Any]): pass
+        # this line should just work
+        repr(C.__mro__)
+
+    def test_dict(self):
+        T = TypeVar('T')
+
+        class B(Generic[T]):
+            pass
+
+        b = B()
+        b.foo = 42
+        self.assertEqual(b.__dict__, {'foo': 42})
+
+        class C(B[int]):
+            pass
+
+        c = C()
+        c.bar = 'abc'
+        self.assertEqual(c.__dict__, {'bar': 'abc'})
+
+    def test_false_subclasses(self):
+        class MyMapping(MutableMapping[str, str]): pass
+        self.assertNotIsInstance({}, MyMapping)
+        self.assertNotIsSubclass(dict, MyMapping)
+
+    def test_abc_bases(self):
+        class MM(MutableMapping[str, str]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+        # this should just work
+        MM().update()
+        self.assertIsInstance(MM(), collections_abc.MutableMapping)
+        self.assertIsInstance(MM(), MutableMapping)
+        self.assertNotIsInstance(MM(), List)
+        self.assertNotIsInstance({}, MM)
+
+    def test_multiple_bases(self):
+        class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
+            pass
+        with self.assertRaises(TypeError):
+            # consistent MRO not possible
+            class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
+                pass
+
+    def test_orig_bases(self):
+        T = TypeVar('T')
+        class C(typing.Dict[str, T]): pass
+        self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
+
+    def test_naive_runtime_checks(self):
+        def naive_dict_check(obj, tp):
+            # Check if a dictionary conforms to Dict type
+            if len(tp.__parameters__) > 0:
+                raise NotImplementedError
+            if tp.__args__:
+                KT, VT = tp.__args__
+                return all(isinstance(k, KT) and isinstance(v, VT)
+                   for k, v in obj.items())
+        self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int]))
+        self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int]))
+        with self.assertRaises(NotImplementedError):
+            naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T])
+
+        def naive_generic_check(obj, tp):
+            # Check if an instance conforms to the generic class
+            if not hasattr(obj, '__orig_class__'):
+                raise NotImplementedError
+            return obj.__orig_class__ == tp
+        class Node(Generic[T]): pass
+        self.assertTrue(naive_generic_check(Node[int](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), Node[int]))
+        self.assertFalse(naive_generic_check(Node[str](), List))
+        with self.assertRaises(NotImplementedError):
+            naive_generic_check([1,2,3], Node[int])
+
+        def naive_list_base_check(obj, tp):
+            # Check if list conforms to a List subclass
+            return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
+                       for x in obj)
+        class C(List[int]): pass
+        self.assertTrue(naive_list_base_check([1, 2, 3], C))
+        self.assertFalse(naive_list_base_check(['a', 'b'], C))
+
+    def test_multi_subscr_base(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        V = TypeVar('V')
+        class C(List[T][U][V]): pass
+        class D(C, List[T][U][V]): pass
+        self.assertEqual(C.__parameters__, (V,))
+        self.assertEqual(D.__parameters__, (V,))
+        self.assertEqual(C[int].__parameters__, ())
+        self.assertEqual(D[int].__parameters__, ())
+        self.assertEqual(C[int].__args__, (int,))
+        self.assertEqual(D[int].__args__, (int,))
+        self.assertEqual(C.__bases__, (List,))
+        self.assertEqual(D.__bases__, (C, List))
+        self.assertEqual(C.__orig_bases__, (List[T][U][V],))
+        self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
+
+    def test_extended_generic_rules_eq(self):
+        T = TypeVar('T')
+        U = TypeVar('U')
+        self.assertEqual(Tuple[T, T][int], Tuple[int, int])
+        self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
+        with self.assertRaises(TypeError):
+            Tuple[T, int][()]
+        with self.assertRaises(TypeError):
+            Tuple[T, U][T, ...]
+
+        self.assertEqual(Union[T, int][int], int)
+        self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
+        class Base(object): pass
+        class Derived(Base): pass
+        self.assertEqual(Union[T, Base][Derived], Base)
+        with self.assertRaises(TypeError):
+            Union[T, int][1]
+
+        self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
+        self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
+        with self.assertRaises(TypeError):
+            Callable[[T], U][..., int]
+        with self.assertRaises(TypeError):
+            Callable[[T], U][[], int]
+
+    def test_extended_generic_rules_repr(self):
+        T = TypeVar('T')
+        self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
+                         'Union[Tuple, Callable]')
+        self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
+                         'Tuple')
+        self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
+                         'Callable[..., Union[int, NoneType]]')
+        self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
+                         'Callable[[], List[int]]')
+
+    def test_generic_forvard_ref(self):
+        LLT = List[List['CC']]
+        class CC: pass
+        self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]])
+        T = TypeVar('T')
+        AT = Tuple[T, ...]
+        self.assertIs(typing._eval_type(AT, globals(), locals()), AT)
+        CT = Callable[..., List[T]]
+        self.assertIs(typing._eval_type(CT, globals(), locals()), CT)
+
+    def test_extended_generic_rules_subclassing(self):
+        class T1(Tuple[T, KT]): pass
+        class T2(Tuple[T, ...]): pass
+        class C1(Callable[[T], T]): pass
+        class C2(Callable[..., int]):
+            def __call__(self):
+                return None
+
+        self.assertEqual(T1.__parameters__, (T, KT))
+        self.assertEqual(T1[int, str].__args__, (int, str))
+        self.assertEqual(T1[int, T].__origin__, T1)
+
+        self.assertEqual(T2.__parameters__, (T,))
+        with self.assertRaises(TypeError):
+            T1[int]
+        with self.assertRaises(TypeError):
+            T2[int, str]
+
+        self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
+        self.assertEqual(C2.__parameters__, ())
+        self.assertIsInstance(C2(), collections_abc.Callable)
+        self.assertIsSubclass(C2, collections_abc.Callable)
+        self.assertIsSubclass(C1, collections_abc.Callable)
+        self.assertIsInstance(T1(), tuple)
+        self.assertIsSubclass(T2, tuple)
+        self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
+        self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
+
+    def test_fail_with_bare_union(self):
+        with self.assertRaises(TypeError):
+            List[Union]
+        with self.assertRaises(TypeError):
+            Tuple[Optional]
+        with self.assertRaises(TypeError):
+            ClassVar[ClassVar]
+        with self.assertRaises(TypeError):
+            List[ClassVar[int]]
+
+    def test_fail_with_bare_generic(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            List[Generic]
+        with self.assertRaises(TypeError):
+            Tuple[Generic[T]]
+        with self.assertRaises(TypeError):
+            List[typing._Protocol]
+
+    def test_type_erasure_special(self):
+        T = TypeVar('T')
+        # this is the only test that checks type caching
+        self.clear_caches()
+        class MyTup(Tuple[T, T]): pass
+        self.assertIs(MyTup[int]().__class__, MyTup)
+        self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
+        class MyCall(Callable[..., T]):
+            def __call__(self): return None
+        self.assertIs(MyCall[T]().__class__, MyCall)
+        self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
+        class MyDict(typing.Dict[T, T]): pass
+        self.assertIs(MyDict[int]().__class__, MyDict)
+        self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
+        class MyDef(typing.DefaultDict[str, T]): pass
+        self.assertIs(MyDef[int]().__class__, MyDef)
+        self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
+
+    def test_all_repr_eq_any(self):
+        objs = (getattr(typing, el) for el in typing.__all__)
+        for obj in objs:
+            self.assertNotEqual(repr(obj), '')
+            self.assertEqual(obj, obj)
+            if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
+                self.assertEqual(obj[Any].__args__, (Any,))
+            if isinstance(obj, type):
+                for base in obj.__mro__:
+                    self.assertNotEqual(repr(base), '')
+                    self.assertEqual(base, base)
+
+    def test_pickle(self):
+        global C  # pickle wants to reference the class by name
+        T = TypeVar('T')
+
+        class B(Generic[T]):
+            pass
+
+        class C(B[int]):
+            pass
+
+        c = C()
+        c.foo = 42
+        c.bar = 'abc'
+        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+            z = pickle.dumps(c, proto)
+            x = pickle.loads(z)
+            self.assertEqual(x.foo, 42)
+            self.assertEqual(x.bar, 'abc')
+            self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+        simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
+        for s in simples:
+            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+                z = pickle.dumps(s, proto)
+                x = pickle.loads(z)
+                self.assertEqual(s, x)
+
+    def test_copy_and_deepcopy(self):
+        T = TypeVar('T')
+        class Node(Generic[T]): pass
+        things = [Any, Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
+                  Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
+                  typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
+                  typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
+                  Union['T', int], List['T'], typing.Mapping['T', int]]
+        for t in things:
+            self.assertEqual(t, deepcopy(t))
+            self.assertEqual(t, copy(t))
+
+    def test_parameterized_slots(self):
+        T = TypeVar('T')
+        class C(Generic[T]):
+            __slots__ = ('potato',)
+
+        c = C()
+        c_int = C[int]()
+        self.assertEqual(C.__slots__, C[str].__slots__)
+
+        c.potato = 0
+        c_int.potato = 0
+        with self.assertRaises(AttributeError):
+            c.tomato = 0
+        with self.assertRaises(AttributeError):
+            c_int.tomato = 0
+
+        self.assertEqual(typing._eval_type(C['C'], globals(), locals()), C[C])
+        self.assertEqual(typing._eval_type(C['C'], globals(), locals()).__slots__,
+                         C.__slots__)
+        self.assertEqual(copy(C[int]), deepcopy(C[int]))
+
+    def test_parameterized_slots_dict(self):
+        T = TypeVar('T')
+        class D(Generic[T]):
+            __slots__ = {'banana': 42}
+
+        d = D()
+        d_int = D[int]()
+        self.assertEqual(D.__slots__, D[str].__slots__)
+
+        d.banana = 'yes'
+        d_int.banana = 'yes'
+        with self.assertRaises(AttributeError):
+            d.foobar = 'no'
+        with self.assertRaises(AttributeError):
+            d_int.foobar = 'no'
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            B = SimpleMapping[XK, Any]
+
+            class C(Generic[B]):
+                pass
+
+    def test_repr_2(self):
+        PY32 = sys.version_info[:2] < (3, 3)
+
+        class C(Generic[T]):
+            pass
+
+        self.assertEqual(C.__module__, __name__)
+        if not PY32:
+            self.assertEqual(C.__qualname__,
+                             'GenericTests.test_repr_2.<locals>.C')
+        self.assertEqual(repr(C).split('.')[-1], 'C')
+        X = C[int]
+        self.assertEqual(X.__module__, __name__)
+        if not PY32:
+            self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
+        self.assertEqual(repr(X).split('.')[-1], 'C[int]')
+
+        class Y(C[int]):
+            pass
+
+        self.assertEqual(Y.__module__, __name__)
+        if not PY32:
+            self.assertEqual(Y.__qualname__,
+                             'GenericTests.test_repr_2.<locals>.Y')
+        self.assertEqual(repr(Y).split('.')[-1], 'Y')
+
+    def test_eq_1(self):
+        self.assertEqual(Generic, Generic)
+        self.assertEqual(Generic[T], Generic[T])
+        self.assertNotEqual(Generic[KT], Generic[VT])
+
+    def test_eq_2(self):
+
+        class A(Generic[T]):
+            pass
+
+        class B(Generic[T]):
+            pass
+
+        self.assertEqual(A, A)
+        self.assertNotEqual(A, B)
+        self.assertEqual(A[T], A[T])
+        self.assertNotEqual(A[T], B[T])
+
+    def test_multiple_inheritance(self):
+
+        class A(Generic[T, VT]):
+            pass
+
+        class B(Generic[KT, T]):
+            pass
+
+        class C(A[T, VT], Generic[VT, T, KT], B[KT, T]):
+            pass
+
+        self.assertEqual(C.__parameters__, (VT, T, KT))
+
+    def test_nested(self):
+
+        G = Generic
+
+        class Visitor(G[T]):
+
+            a = None
+
+            def set(self, a):
+                self.a = a
+
+            def get(self):
+                return self.a
+
+            def visit(self):
+                return self.a
+
+        V = Visitor[typing.List[int]]
+
+        class IntListVisitor(V):
+
+            def append(self, x):
+                self.a.append(x)
+
+        a = IntListVisitor()
+        a.set([])
+        a.append(1)
+        a.append(42)
+        self.assertEqual(a.get(), [1, 42])
+
+    def test_type_erasure(self):
+        T = TypeVar('T')
+
+        class Node(Generic[T]):
+            def __init__(self, label,
+                         left = None,
+                         right = None):
+                self.label = label  # type: T
+                self.left = left  # type: Optional[Node[T]]
+                self.right = right  # type: Optional[Node[T]]
+
+        def foo(x):
+            a = Node(x)
+            b = Node[T](x)
+            c = Node[Any](x)
+            self.assertIs(type(a), Node)
+            self.assertIs(type(b), Node)
+            self.assertIs(type(c), Node)
+            self.assertEqual(a.label, x)
+            self.assertEqual(b.label, x)
+            self.assertEqual(c.label, x)
+
+        foo(42)
+
+    def test_implicit_any(self):
+        T = TypeVar('T')
+
+        class C(Generic[T]):
+            pass
+
+        class D(C):
+            pass
+
+        self.assertEqual(D.__parameters__, ())
+
+        with self.assertRaises(Exception):
+            D[int]
+        with self.assertRaises(Exception):
+            D[Any]
+        with self.assertRaises(Exception):
+            D[T]
+
+class ClassVarTests(BaseTestCase):
+
+    def test_basics(self):
+        with self.assertRaises(TypeError):
+            ClassVar[1]
+        with self.assertRaises(TypeError):
+            ClassVar[int, str]
+        with self.assertRaises(TypeError):
+            ClassVar[int][str]
+
+    def test_repr(self):
+        self.assertEqual(repr(ClassVar), 'typing.ClassVar')
+        cv = ClassVar[int]
+        self.assertEqual(repr(cv), 'typing.ClassVar[int]')
+        cv = ClassVar[Employee]
+        self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar)):
+                pass
+        with self.assertRaises(TypeError):
+            class C(type(ClassVar[int])):
+                pass
+
+    def test_cannot_init(self):
+        with self.assertRaises(TypeError):
+            ClassVar()
+        with self.assertRaises(TypeError):
+            type(ClassVar)()
+        with self.assertRaises(TypeError):
+            type(ClassVar[Optional[int]])()
+
+    def test_no_isinstance(self):
+        with self.assertRaises(TypeError):
+            isinstance(1, ClassVar[int])
+        with self.assertRaises(TypeError):
+            issubclass(int, ClassVar)
+
+
+class CastTests(BaseTestCase):
+
+    def test_basics(self):
+        self.assertEqual(cast(int, 42), 42)
+        self.assertEqual(cast(float, 42), 42)
+        self.assertIs(type(cast(float, 42)), int)
+        self.assertEqual(cast(Any, 42), 42)
+        self.assertEqual(cast(list, 42), 42)
+        self.assertEqual(cast(Union[str, float], 42), 42)
+        self.assertEqual(cast(AnyStr, 42), 42)
+        self.assertEqual(cast(None, 42), 42)
+
+    def test_errors(self):
+        # Bogus calls are not expected to fail.
+        cast(42, 42)
+        cast('hello', 42)
+
+
+class ForwardRefTests(BaseTestCase):
+
+    def test_forwardref_instance_type_error(self):
+        fr = typing._ForwardRef('int')
+        with self.assertRaises(TypeError):
+            isinstance(42, fr)
+
+    def test_syntax_error(self):
+
+        with self.assertRaises(SyntaxError):
+            Generic['/T']
+
+
+class OverloadTests(BaseTestCase):
+
+    def test_overload_exists(self):
+        from typing import overload
+
+    def test_overload_fails(self):
+        from typing import overload
+
+        with self.assertRaises(RuntimeError):
+
+            @overload
+            def blah():
+                pass
+
+            blah()
+
+    def test_overload_succeeds(self):
+        from typing import overload
+
+        @overload
+        def blah():
+            pass
+
+        def blah():
+            pass
+
+        blah()
+
+
+class CollectionsAbcTests(BaseTestCase):
+
+    def test_hashable(self):
+        self.assertIsInstance(42, typing.Hashable)
+        self.assertNotIsInstance([], typing.Hashable)
+
+    def test_iterable(self):
+        self.assertIsInstance([], typing.Iterable)
+        # Due to ABC caching, the second time takes a separate code
+        # path and could fail.  So call this a few times.
+        self.assertIsInstance([], typing.Iterable)
+        self.assertIsInstance([], typing.Iterable)
+        self.assertNotIsInstance(42, typing.Iterable)
+        # Just in case, also test issubclass() a few times.
+        self.assertIsSubclass(list, typing.Iterable)
+        self.assertIsSubclass(list, typing.Iterable)
+
+    def test_iterator(self):
+        it = iter([])
+        self.assertIsInstance(it, typing.Iterator)
+        self.assertNotIsInstance(42, typing.Iterator)
+
+    def test_sized(self):
+        self.assertIsInstance([], typing.Sized)
+        self.assertNotIsInstance(42, typing.Sized)
+
+    def test_container(self):
+        self.assertIsInstance([], typing.Container)
+        self.assertNotIsInstance(42, typing.Container)
+
+    def test_abstractset(self):
+        self.assertIsInstance(set(), typing.AbstractSet)
+        self.assertNotIsInstance(42, typing.AbstractSet)
+
+    def test_mutableset(self):
+        self.assertIsInstance(set(), typing.MutableSet)
+        self.assertNotIsInstance(frozenset(), typing.MutableSet)
+
+    def test_mapping(self):
+        self.assertIsInstance({}, typing.Mapping)
+        self.assertNotIsInstance(42, typing.Mapping)
+
+    def test_mutablemapping(self):
+        self.assertIsInstance({}, typing.MutableMapping)
+        self.assertNotIsInstance(42, typing.MutableMapping)
+
+    def test_sequence(self):
+        self.assertIsInstance([], typing.Sequence)
+        self.assertNotIsInstance(42, typing.Sequence)
+
+    def test_mutablesequence(self):
+        self.assertIsInstance([], typing.MutableSequence)
+        self.assertNotIsInstance((), typing.MutableSequence)
+
+    def test_bytestring(self):
+        self.assertIsInstance(b'', typing.ByteString)
+        self.assertIsInstance(bytearray(b''), typing.ByteString)
+
+    def test_list(self):
+        self.assertIsSubclass(list, typing.List)
+
+    def test_set(self):
+        self.assertIsSubclass(set, typing.Set)
+        self.assertNotIsSubclass(frozenset, typing.Set)
+
+    def test_frozenset(self):
+        self.assertIsSubclass(frozenset, typing.FrozenSet)
+        self.assertNotIsSubclass(set, typing.FrozenSet)
+
+    def test_dict(self):
+        self.assertIsSubclass(dict, typing.Dict)
+
+    def test_no_list_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.List()
+        with self.assertRaises(TypeError):
+            typing.List[T]()
+        with self.assertRaises(TypeError):
+            typing.List[int]()
+
+    def test_list_subclass(self):
+
+        class MyList(typing.List[int]):
+            pass
+
+        a = MyList()
+        self.assertIsInstance(a, MyList)
+        self.assertIsInstance(a, typing.Sequence)
+
+        self.assertIsSubclass(MyList, list)
+        self.assertNotIsSubclass(list, MyList)
+
+    def test_no_dict_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Dict()
+        with self.assertRaises(TypeError):
+            typing.Dict[KT, VT]()
+        with self.assertRaises(TypeError):
+            typing.Dict[str, int]()
+
+    def test_dict_subclass(self):
+
+        class MyDict(typing.Dict[str, int]):
+            pass
+
+        d = MyDict()
+        self.assertIsInstance(d, MyDict)
+        self.assertIsInstance(d, typing.MutableMapping)
+
+        self.assertIsSubclass(MyDict, dict)
+        self.assertNotIsSubclass(dict, MyDict)
+
+    def test_no_defaultdict_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.DefaultDict()
+        with self.assertRaises(TypeError):
+            typing.DefaultDict[KT, VT]()
+        with self.assertRaises(TypeError):
+            typing.DefaultDict[str, int]()
+
+    def test_defaultdict_subclass(self):
+
+        class MyDefDict(typing.DefaultDict[str, int]):
+            pass
+
+        dd = MyDefDict()
+        self.assertIsInstance(dd, MyDefDict)
+
+        self.assertIsSubclass(MyDefDict, collections.defaultdict)
+        self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
+
+    def test_no_set_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Set()
+        with self.assertRaises(TypeError):
+            typing.Set[T]()
+        with self.assertRaises(TypeError):
+            typing.Set[int]()
+
+    def test_set_subclass_instantiation(self):
+
+        class MySet(typing.Set[int]):
+            pass
+
+        d = MySet()
+        self.assertIsInstance(d, MySet)
+
+    def test_no_frozenset_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.FrozenSet()
+        with self.assertRaises(TypeError):
+            typing.FrozenSet[T]()
+        with self.assertRaises(TypeError):
+            typing.FrozenSet[int]()
+
+    def test_frozenset_subclass_instantiation(self):
+
+        class MyFrozenSet(typing.FrozenSet[int]):
+            pass
+
+        d = MyFrozenSet()
+        self.assertIsInstance(d, MyFrozenSet)
+
+    def test_no_tuple_instantiation(self):
+        with self.assertRaises(TypeError):
+            Tuple()
+        with self.assertRaises(TypeError):
+            Tuple[T]()
+        with self.assertRaises(TypeError):
+            Tuple[int]()
+
+    def test_generator(self):
+        def foo():
+            yield 42
+        g = foo()
+        self.assertIsSubclass(type(g), typing.Generator)
+
+    def test_no_generator_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Generator()
+        with self.assertRaises(TypeError):
+            typing.Generator[T, T, T]()
+        with self.assertRaises(TypeError):
+            typing.Generator[int, int, int]()
+
+    def test_subclassing(self):
+
+        class MMA(typing.MutableMapping):
+            pass
+
+        with self.assertRaises(TypeError):  # It's abstract
+            MMA()
+
+        class MMC(MMA):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+
+        self.assertEqual(len(MMC()), 0)
+        assert callable(MMC.update)
+        self.assertIsInstance(MMC(), typing.Mapping)
+
+        class MMB(typing.MutableMapping[KT, VT]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+
+        self.assertEqual(len(MMB()), 0)
+        self.assertEqual(len(MMB[str, str]()), 0)
+        self.assertEqual(len(MMB[KT, VT]()), 0)
+
+        self.assertNotIsSubclass(dict, MMA)
+        self.assertNotIsSubclass(dict, MMB)
+
+        self.assertIsSubclass(MMA, typing.Mapping)
+        self.assertIsSubclass(MMB, typing.Mapping)
+        self.assertIsSubclass(MMC, typing.Mapping)
+
+        self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
+        self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
+
+        self.assertIsSubclass(MMA, collections.Mapping)
+        self.assertIsSubclass(MMB, collections.Mapping)
+        self.assertIsSubclass(MMC, collections.Mapping)
+
+        self.assertIsSubclass(MMB[str, str], typing.Mapping)
+        self.assertIsSubclass(MMC, MMA)
+
+        class I(typing.Iterable): pass
+        self.assertNotIsSubclass(list, I)
+
+        class G(typing.Generator[int, int, int]): pass
+        def g(): yield 0
+        self.assertIsSubclass(G, typing.Generator)
+        self.assertIsSubclass(G, typing.Iterable)
+        if hasattr(collections, 'Generator'):
+            self.assertIsSubclass(G, collections.Generator)
+        self.assertIsSubclass(G, collections.Iterable)
+        self.assertNotIsSubclass(type(g), G)
+
+    def test_subclassing_subclasshook(self):
+
+        class Base(typing.Iterable):
+            @classmethod
+            def __subclasshook__(cls, other):
+                if other.__name__ == 'Foo':
+                    return True
+                else:
+                    return False
+
+        class C(Base): pass
+        class Foo: pass
+        class Bar: pass
+        self.assertIsSubclass(Foo, Base)
+        self.assertIsSubclass(Foo, C)
+        self.assertNotIsSubclass(Bar, C)
+
+    def test_subclassing_register(self):
+
+        class A(typing.Container): pass
+        class B(A): pass
+
+        class C: pass
+        A.register(C)
+        self.assertIsSubclass(C, A)
+        self.assertNotIsSubclass(C, B)
+
+        class D: pass
+        B.register(D)
+        self.assertIsSubclass(D, A)
+        self.assertIsSubclass(D, B)
+
+        class M(): pass
+        collections.MutableMapping.register(M)
+        self.assertIsSubclass(M, typing.Mapping)
+
+    def test_collections_as_base(self):
+
+        class M(collections.Mapping): pass
+        self.assertIsSubclass(M, typing.Mapping)
+        self.assertIsSubclass(M, typing.Iterable)
+
+        class S(collections.MutableSequence): pass
+        self.assertIsSubclass(S, typing.MutableSequence)
+        self.assertIsSubclass(S, typing.Iterable)
+
+        class I(collections.Iterable): pass
+        self.assertIsSubclass(I, typing.Iterable)
+
+        class A(collections.Mapping): pass
+        class B: pass
+        A.register(B)
+        self.assertIsSubclass(B, typing.Mapping)
+
+class TypeTests(BaseTestCase):
+
+    def test_type_basic(self):
+
+        class User(object): pass
+        class BasicUser(User): pass
+        class ProUser(User): pass
+
+        def new_user(user_class):
+            # type: (Type[User]) -> User
+            return user_class()
+
+        joe = new_user(BasicUser)
+
+    def test_type_typevar(self):
+
+        class User(object): pass
+        class BasicUser(User): pass
+        class ProUser(User): pass
+
+        global U
+        U = TypeVar('U', bound=User)
+
+        def new_user(user_class):
+            # type: (Type[U]) -> U
+            return user_class()
+
+        joe = new_user(BasicUser)
+
+    def test_type_optional(self):
+        A = Optional[Type[BaseException]]
+
+        def foo(a):
+            # type: (A) -> Optional[BaseException]
+            if a is None:
+                return None
+            else:
+                return a()
+
+        assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+        assert foo(None) is None
+
+
+class NewTypeTests(BaseTestCase):
+
+    def test_basic(self):
+        UserId = NewType('UserId', int)
+        UserName = NewType('UserName', str)
+        self.assertIsInstance(UserId(5), int)
+        self.assertIsInstance(UserName('Joe'), type('Joe'))
+        self.assertEqual(UserId(5) + 1, 6)
+
+    def test_errors(self):
+        UserId = NewType('UserId', int)
+        UserName = NewType('UserName', str)
+        with self.assertRaises(TypeError):
+            issubclass(UserId, int)
+        with self.assertRaises(TypeError):
+            class D(UserName):
+                pass
+
+
+class NamedTupleTests(BaseTestCase):
+
+    def test_basics(self):
+        Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+        self.assertIsSubclass(Emp, tuple)
+        joe = Emp('Joe', 42)
+        jim = Emp(name='Jim', id=1)
+        self.assertIsInstance(joe, Emp)
+        self.assertIsInstance(joe, tuple)
+        self.assertEqual(joe.name, 'Joe')
+        self.assertEqual(joe.id, 42)
+        self.assertEqual(jim.name, 'Jim')
+        self.assertEqual(jim.id, 1)
+        self.assertEqual(Emp.__name__, 'Emp')
+        self.assertEqual(Emp._fields, ('name', 'id'))
+        self.assertEqual(Emp._field_types, dict(name=str, id=int))
+
+    def test_pickle(self):
+        global Emp  # pickle wants to reference the class by name
+        Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+        jane = Emp('jane', 37)
+        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+            z = pickle.dumps(jane, proto)
+            jane2 = pickle.loads(z)
+            self.assertEqual(jane2, jane)
+
+
+class IOTests(BaseTestCase):
+
+    def test_io_submodule(self):
+        from typing.io import IO, TextIO, BinaryIO, __all__, __name__
+        self.assertIs(IO, typing.IO)
+        self.assertIs(TextIO, typing.TextIO)
+        self.assertIs(BinaryIO, typing.BinaryIO)
+        self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
+        self.assertEqual(__name__, 'typing.io')
+
+
+class RETests(BaseTestCase):
+    # Much of this is really testing _TypeAlias.
+
+    def test_basics(self):
+        pat = re.compile('[a-z]+', re.I)
+        self.assertIsSubclass(pat.__class__, Pattern)
+        self.assertIsSubclass(type(pat), Pattern)
+        self.assertIsInstance(pat, Pattern)
+
+        mat = pat.search('12345abcde.....')
+        self.assertIsSubclass(mat.__class__, Match)
+        self.assertIsSubclass(type(mat), Match)
+        self.assertIsInstance(mat, Match)
+
+        # these should just work
+        p = Pattern[Union[str, bytes]]
+        m = Match[Union[bytes, str]]
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            # Doesn't fit AnyStr.
+            Pattern[int]
+        with self.assertRaises(TypeError):
+            # Can't change type vars?
+            Match[T]
+        m = Match[Union[str, bytes]]
+        with self.assertRaises(TypeError):
+            # Too complicated?
+            m[str]
+        with self.assertRaises(TypeError):
+            # We don't support isinstance().
+            isinstance(42, Pattern[str])
+
+    def test_repr(self):
+        self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
+        self.assertEqual(repr(Pattern[unicode]), 'Pattern[unicode]')
+        self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
+        self.assertEqual(repr(Match), 'Match[~AnyStr]')
+        self.assertEqual(repr(Match[unicode]), 'Match[unicode]')
+        self.assertEqual(repr(Match[str]), 'Match[str]')
+
+    def test_re_submodule(self):
+        from typing.re import Match, Pattern, __all__, __name__
+        self.assertIs(Match, typing.Match)
+        self.assertIs(Pattern, typing.Pattern)
+        self.assertEqual(set(__all__), set(['Match', 'Pattern']))
+        self.assertEqual(__name__, 'typing.re')
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError) as ex:
+
+            class A(typing.Match):
+                pass
+
+        self.assertEqual(str(ex.exception),
+                         "Cannot subclass typing._TypeAlias")
+
+
+class AllTests(BaseTestCase):
+    """Tests for __all__."""
+
+    def test_all(self):
+        from typing import __all__ as a
+        # Just spot-check the first and last of every category.
+        self.assertIn('AbstractSet', a)
+        self.assertIn('ValuesView', a)
+        self.assertIn('cast', a)
+        self.assertIn('overload', a)
+        # Check that io and re are not exported.
+        self.assertNotIn('io', a)
+        self.assertNotIn('re', a)
+        # Spot-check that stdlib modules aren't exported.
+        self.assertNotIn('os', a)
+        self.assertNotIn('sys', a)
+        # Check that Text is defined.
+        self.assertIn('Text', a)
+
+    def test_respect_no_type_check(self):
+        @typing.no_type_check
+        class NoTpCheck(object):
+            class Inn(object):
+                def __init__(self, x): pass
+                    # type: (this is not actualy a type) -> None
+        self.assertTrue(NoTpCheck.__no_type_check__)
+        self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+
+    def test_get_type_hints_dummy(self):
+
+        def foo(x):
+            # type: (int) -> int
+            return x + 1
+
+        self.assertIsNone(typing.get_type_hints(foo))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib-typing/2.7/typing.py b/lib-typing/2.7/typing.py
new file mode 100644
index 0000000..5627697
--- /dev/null
+++ b/lib-typing/2.7/typing.py
@@ -0,0 +1,2016 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import functools
+import re as stdlib_re  # Avoid confusion with the re we export.
+import sys
+import types
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'Callable',
+    'ClassVar',
+    'Generic',
+    'Optional',
+    'Tuple',
+    'Type',
+    'TypeVar',
+    'Union',
+
+    # ABCs (from collections.abc).
+    'AbstractSet',  # collections.abc.Set.
+    'ByteString',
+    'Container',
+    'Hashable',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'Mapping',
+    'MappingView',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'Sequence',
+    'Sized',
+    'ValuesView',
+
+    # Structural checks, a.k.a. protocols.
+    'Reversible',
+    'SupportsAbs',
+    'SupportsFloat',
+    'SupportsInt',
+
+    # Concrete collection types.
+    'Dict',
+    'DefaultDict',
+    'List',
+    'Set',
+    'FrozenSet',
+    'NamedTuple',  # Not really a type.
+    'Generator',
+
+    # One-off things.
+    'AnyStr',
+    'cast',
+    'get_type_hints',
+    'NewType',
+    'no_type_check',
+    'no_type_check_decorator',
+    'overload',
+    'Text',
+    'TYPE_CHECKING',
+]
+
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
+
+def _qualname(x):
+    if sys.version_info[:2] >= (3, 3):
+        return x.__qualname__
+    else:
+        # Fall back to just name.
+        return x.__name__
+
+def _trim_name(nm):
+    if nm.startswith('_') and nm not in ('_TypeAlias',
+                    '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
+        nm = nm[1:]
+    return nm
+
+
+class TypingMeta(type):
+    """Metaclass for most types defined in typing module
+    (not a part of public API).
+
+    This also defines a dummy constructor (all the work for most typing
+    constructs is done in __new__) and a nicer repr().
+    """
+
+    _is_protocol = False
+
+    def __new__(cls, name, bases, namespace):
+        return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
+
+    @classmethod
+    def assert_no_subclassing(cls, bases):
+        for base in bases:
+            if isinstance(base, cls):
+                raise TypeError("Cannot subclass %s" %
+                                (', '.join(map(_type_repr, bases)) or '()'))
+
+    def __init__(self, *args, **kwds):
+        pass
+
+    def _eval_type(self, globalns, localns):
+        """Override this in subclasses to interpret forward references.
+
+        For example, List['C'] is internally stored as
+        List[_ForwardRef('C')], which should evaluate to List[C],
+        where C is an object found in globalns or localns (searching
+        localns first, of course).
+        """
+        return self
+
+    def _get_type_vars(self, tvars):
+        pass
+
+    def __repr__(self):
+        qname = _trim_name(_qualname(self))
+        return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(object):
+    """Internal indicator of special typing constructs."""
+    __metaclass__ = TypingMeta
+    __slots__ = ()
+
+    def __init__(self, *args, **kwds):
+        pass
+
+    def __new__(cls, *args, **kwds):
+        """Constructor.
+
+        This only exists to give a better error message in case
+        someone tries to subclass a special typing object (not a good idea).
+        """
+        if (len(args) == 3 and
+                isinstance(args[0], str) and
+                isinstance(args[1], tuple)):
+            # Close enough.
+            raise TypeError("Cannot subclass %r" % cls)
+        return super(_TypingBase, cls).__new__(cls)
+
+    # Things that are not classes also need these.
+    def _eval_type(self, globalns, localns):
+        return self
+
+    def _get_type_vars(self, tvars):
+        pass
+
+    def __repr__(self):
+        cls = type(self)
+        qname = _trim_name(_qualname(cls))
+        return '%s.%s' % (cls.__module__, qname)
+
+    def __call__(self, *args, **kwds):
+        raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase):
+    """Internal mix-in class to prevent instantiation.
+
+    Prevents instantiation unless _root=True is given in class call.
+    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
+        if '_root' in kwds and kwds['_root'] is True:
+            return self
+        raise TypeError("Cannot instantiate %r" % cls)
+
+    def __reduce__(self):
+        return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase):
+    """Internal wrapper to hold a forward reference."""
+
+    __slots__ = ('__forward_arg__', '__forward_code__',
+                 '__forward_evaluated__', '__forward_value__')
+
+    def __init__(self, arg):
+        super(_ForwardRef, self).__init__(arg)
+        if not isinstance(arg, basestring):
+            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+        try:
+            code = compile(arg, '<string>', 'eval')
+        except SyntaxError:
+            raise SyntaxError('Forward reference must be an expression -- got %r' %
+                              (arg,))
+        self.__forward_arg__ = arg
+        self.__forward_code__ = code
+        self.__forward_evaluated__ = False
+        self.__forward_value__ = None
+
+    def _eval_type(self, globalns, localns):
+        if not self.__forward_evaluated__ or localns is not globalns:
+            if globalns is None and localns is None:
+                globalns = localns = {}
+            elif globalns is None:
+                globalns = localns
+            elif localns is None:
+                localns = globalns
+            self.__forward_value__ = _type_check(
+                eval(self.__forward_code__, globalns, localns),
+                "Forward references must evaluate to types.")
+            self.__forward_evaluated__ = True
+        return self.__forward_value__
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Forward references cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Forward references cannot be used with issubclass().")
+
+    def __repr__(self):
+        return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias(_TypingBase):
+    """Internal helper class for defining generic variants of concrete types.
+
+    Note that this is not a type; let's call it a pseudo-type.  It cannot
+    be used in instance and subclass checks in parameterized form, i.e.
+    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+    ``False``.
+    """
+
+    __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+
+    def __init__(self, name, type_var, impl_type, type_checker):
+        """Initializer.
+
+        Args:
+            name: The name, e.g. 'Pattern'.
+            type_var: The type parameter, e.g. AnyStr, or the
+                specific type, e.g. str.
+            impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance.
+                and returns a value that should be a type_var instance.
+        """
+        assert isinstance(name, basestring), repr(name)
+        assert isinstance(impl_type, type), repr(impl_type)
+        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+        self.name = name
+        self.type_var = type_var
+        self.impl_type = impl_type
+        self.type_checker = type_checker
+
+    def __repr__(self):
+        return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+    def __getitem__(self, parameter):
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("%s cannot be further parameterized." % self)
+        if self.type_var.__constraints__ and isinstance(parameter, type):
+            if not issubclass(parameter, self.type_var.__constraints__):
+                raise TypeError("%s is not a valid substitution for %s." %
+                                (parameter, self.type_var))
+        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+            raise TypeError("%s cannot be re-parameterized." % self)
+        return self.__class__(self.name, parameter,
+                              self.impl_type, self.type_checker)
+
+    def __instancecheck__(self, obj):
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with isinstance().")
+        return isinstance(obj, self.impl_type)
+
+    def __subclasscheck__(self, cls):
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with issubclass().")
+        return issubclass(cls, self.impl_type)
+
+
+def _get_type_vars(types, tvars):
+    for t in types:
+        if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+            t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+    tvars = []
+    _get_type_vars(types, tvars)
+    return tuple(tvars)
+
+
+def _eval_type(t, globalns, localns):
+    if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+        return t._eval_type(globalns, localns)
+    return t
+
+
+def _type_check(arg, msg):
+    """Check that the argument is a type, and return it (internal helper).
+
+    As a special case, accept None and return type(None) instead.
+    Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+    The msg argument is a human-readable error message, e.g.
+
+        "Union[arg, ...]: arg should be a type."
+
+    We append the repr() of the actual value (truncated to 100 chars).
+    """
+    if arg is None:
+        return type(None)
+    if isinstance(arg, basestring):
+        arg = _ForwardRef(arg)
+    if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+        not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
+        raise TypeError(msg + " Got %.100r." % (arg,))
+    # Bare Union etc. are not valid as type arguments
+    if (type(arg).__name__ in ('_Union', '_Optional')
+        and not getattr(arg, '__origin__', None)
+        or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
+        raise TypeError("Plain %s is not valid as type argument" % arg)
+    return arg
+
+
+def _type_repr(obj):
+    """Return the repr() of an object, special-casing types (internal helper).
+
+    If obj is a type, we return a shorter version than the default
+    type.__repr__, based on the module and qualified name, which is
+    typically enough to uniquely identify a type.  For everything
+    else, we fall back on repr(obj).
+    """
+    if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+        if obj.__module__ == '__builtin__':
+            return _qualname(obj)
+        return '%s.%s' % (obj.__module__, _qualname(obj))
+    if obj is Ellipsis:
+        return('...')
+    if isinstance(obj, types.FunctionType):
+        return obj.__name__
+    return repr(obj)
+
+
class ClassVarMeta(TypingMeta):
    """Metaclass for _ClassVar; forbids subclassing the construct."""

    def __new__(cls, name, bases, namespace):
        cls.assert_no_subclassing(bases)
        return super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
+
+
class _ClassVar(_FinalTypingBase):
    """Special type construct to mark class variables.

    An annotation wrapped in ClassVar indicates that a given
    attribute is intended to be used as a class variable and
    should not be set on instances of that class. Usage::

      class Starship:
          stats = {}  # type: ClassVar[Dict[str, int]] # class variable
          damage = 10 # type: int                      # instance variable

    ClassVar accepts only types and cannot be further subscribed.

    Note that ClassVar is not a class itself, and should not
    be used with isinstance() or issubclass().
    """

    __metaclass__ = ClassVarMeta
    __slots__ = ('__type__',)

    def __init__(self, tp=None, _root=False):
        # __type__ is None for the bare ClassVar singleton; it is set
        # exactly once when the singleton is subscripted (see __getitem__).
        self.__type__ = tp

    def __getitem__(self, item):
        # Allow subscription exactly once: ClassVar[X] works,
        # ClassVar[X][Y] raises.
        cls = type(self)
        if self.__type__ is None:
            return cls(_type_check(item,
                       '{} accepts only types.'.format(cls.__name__[1:])),
                       _root=True)
        raise TypeError('{} cannot be further subscripted'
                        .format(cls.__name__[1:]))

    def _eval_type(self, globalns, localns):
        # Resolve forward references in the wrapped type.
        return type(self)(_eval_type(self.__type__, globalns, localns),
                          _root=True)

    def __repr__(self):
        r = super(_ClassVar, self).__repr__()
        if self.__type__ is not None:
            r += '[{}]'.format(_type_repr(self.__type__))
        return r

    def __hash__(self):
        return hash((type(self).__name__, self.__type__))

    def __eq__(self, other):
        if not isinstance(other, _ClassVar):
            return NotImplemented
        if self.__type__ is not None:
            return self.__type__ == other.__type__
        # The unsubscripted ClassVar compares by identity only.
        return self is other

# The public singleton; subscript it as ClassVar[X].
ClassVar = _ClassVar(_root=True)
+
+
class AnyMeta(TypingMeta):
    """Metaclass for Any; forbids subclassing the construct."""

    def __new__(cls, name, bases, namespace):
        cls.assert_no_subclassing(bases)
        return super(AnyMeta, cls).__new__(cls, name, bases, namespace)
+
+
class _Any(_FinalTypingBase):
    """Special type indicating an unconstrained type.

    From the point of view of a static type checker:

    - Any is compatible with every type.
    - Any is assumed to have all methods.
    - All values are assumed to be instances of Any.

    At runtime, however, Any must not be used with instance or
    class checks.
    """
    __metaclass__ = AnyMeta
    __slots__ = ()

    def __subclasscheck__(self, cls):
        raise TypeError("Any cannot be used with issubclass().")

    def __instancecheck__(self, obj):
        raise TypeError("Any cannot be used with isinstance().")


Any = _Any(_root=True)
+
+
class TypeVarMeta(TypingMeta):
    """Metaclass for TypeVar; forbids subclassing the construct."""

    def __new__(cls, name, bases, namespace):
        cls.assert_no_subclassing(bases)
        return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
+
+
class TypeVar(_TypingBase):
    """Type variable.

    Usage::

      T = TypeVar('T')  # Can be anything
      A = TypeVar('A', str, bytes)  # Must be str or bytes

    Type variables exist primarily for the benefit of static type
    checkers.  They serve as the parameters for generic types as well
    as for generic function definitions.  See class Generic for more
    information on generic types.  Generic functions work as follows:

      def repeat(x: T, n: int) -> List[T]:
          '''Return a list containing n references to x.'''
          return [x]*n

      def longest(x: A, y: A) -> A:
          '''Return the longest of two strings.'''
          return x if len(x) >= len(y) else y

    The latter example's signature is essentially the overloading
    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
    that if the arguments are instances of some subclass of str,
    the return type is still plain str.

    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.

    Type variables defined with covariant=True or contravariant=True
    can be used to declare covariant or contravariant generic types.
    See PEP 484 for more details. By default generic types are invariant
    in all type variables.

    Type variables can be introspected. e.g.:

      T.__name__ == 'T'
      T.__constraints__ == ()
      T.__covariant__ == False
      T.__contravariant__ == False
      A.__constraints__ == (str, bytes)
    """

    __metaclass__ = TypeVarMeta
    __slots__ = ('__name__', '__bound__', '__constraints__',
                 '__covariant__', '__contravariant__')

    def __init__(self, name, *constraints, **kwargs):
        # Keyword-only options (Python 2 has no keyword-only syntax):
        # bound=None, covariant=False, contravariant=False.
        super(TypeVar, self).__init__(name, *constraints, **kwargs)
        bound = kwargs.get('bound', None)
        covariant = kwargs.get('covariant', False)
        contravariant = kwargs.get('contravariant', False)
        self.__name__ = name
        if covariant and contravariant:
            raise ValueError("Bivariant types are not supported.")
        self.__covariant__ = bool(covariant)
        self.__contravariant__ = bool(contravariant)
        if constraints and bound is not None:
            raise TypeError("Constraints cannot be combined with bound=...")
        if constraints and len(constraints) == 1:
            raise TypeError("A single constraint is not allowed")
        msg = "TypeVar(name, constraint, ...): constraints must be types."
        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
        if bound:
            self.__bound__ = _type_check(bound, "Bound must be a type.")
        else:
            self.__bound__ = None

    def _get_type_vars(self, tvars):
        # A type variable reports itself, once, as a contained type variable.
        if self not in tvars:
            tvars.append(self)

    def __repr__(self):
        # Variance is encoded in the repr prefix: +T (co), -T (contra), ~T.
        if self.__covariant__:
            prefix = '+'
        elif self.__contravariant__:
            prefix = '-'
        else:
            prefix = '~'
        return prefix + self.__name__

    def __instancecheck__(self, instance):
        raise TypeError("Type variables cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Type variables cannot be used with issubclass().")
+
+
# Some unconstrained type variables.  These are used by the container types.
# (These are not for export.)
T = TypeVar('T')  # Any type.
KT = TypeVar('KT')  # Key type.
VT = TypeVar('VT')  # Value type.
T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.

# A useful type variable with constraints.  This represents string types.
# (This one *is* for export!)
# NOTE: Python 2 backport -- the text type here is 'unicode', not 'str'.
AnyStr = TypeVar('AnyStr', bytes, unicode)
+
+
+def _replace_arg(arg, tvars, args):
+    """An internal helper function: replace arg if it is a type variable
+    found in tvars with corresponding substitution from args or
+    with corresponding substitution sub-tree if arg is a generic type.
+    """
+
+    if tvars is None:
+        tvars = []
+    if hasattr(arg, '_subs_tree'):
+        return arg._subs_tree(tvars, args)
+    if isinstance(arg, TypeVar):
+        for i, tvar in enumerate(tvars):
+            if arg == tvar:
+                return args[i]
+    return arg
+
+
def _subs_tree(cls, tvars=None, args=None):
    """An internal helper function: calculate substitution tree
    for generic cls after replacing its type parameters with
    substitutions in tvars -> args (if any).
    Repeat the same following __origin__'s.

    Return a list of arguments with all possible substitutions
    performed. Arguments that are generic classes themselves are represented
    as tuples (so that no new classes are created by this function).
    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
    """

    # An unsubscripted generic has nothing to substitute.
    if cls.__origin__ is None:
        return cls
    # Make a chain of origins (i.e. cls -> cls.__origin__ -> ...).
    current = cls.__origin__
    orig_chain = []
    while current.__origin__ is not None:
        orig_chain.append(current)
        current = current.__origin__
    # Replace type variables in __args__ if asked ...
    tree_args = [_replace_arg(arg, tvars, args) for arg in cls.__args__]
    # ... then continue replacing down the origin chain.
    # (Fixed: the original looped with enumerate() but never used the index.)
    for ocls in orig_chain:
        tree_args = [_replace_arg(arg, ocls.__parameters__, tree_args)
                     for arg in ocls.__args__]
    return tree_args
+
+
def _remove_dups_flatten(parameters):
    """An internal helper for Union creation and substitution: flatten Union's
    among parameters, then remove duplicates and strict subclasses.
    """

    # Flatten out Union[Union[...], ...].
    params = []
    for p in parameters:
        if isinstance(p, _Union) and p.__origin__ is Union:
            params.extend(p.__args__)
        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
            # A Union represented as a substitution tree (see _subs_tree).
            params.extend(p[1:])
        else:
            params.append(p)
    # Weed out strict duplicates, preserving the first of each occurrence.
    all_params = set(params)
    if len(all_params) < len(params):
        new_params = []
        for t in params:
            if t in all_params:
                new_params.append(t)
                all_params.remove(t)
        params = new_params
        assert not all_params, all_params
    # Weed out subclasses.
    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
    # If object is present it will be sole survivor among proper classes.
    # Never discard type variables.
    # (In particular, Union[str, AnyStr] != AnyStr.)
    all_params = set(params)
    for t1 in params:
        if not isinstance(t1, type):
            continue
        # Drop t1 if some other plain class in the union is a superclass;
        # parameterized generics (GenericMeta with an origin) are excluded
        # from consideration as superclasses.
        if any(isinstance(t2, type) and issubclass(t1, t2)
               for t2 in all_params - {t1}
               if not (isinstance(t2, GenericMeta) and
                       t2.__origin__ is not None)):
            all_params.remove(t1)
    return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+    # Check correct count for parameters of a generic cls (internal helper).
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+    maxsize = 128
+    cache = {}
+    _cleanups.append(cache.clear)
+
+    @functools.wraps(func)
+    def inner(*args):
+        key = args
+        try:
+            return cache[key]
+        except TypeError:
+            # Assume it's an unhashable argument.
+            return func(*args)
+        except KeyError:
+            value = func(*args)
+            if len(cache) >= maxsize:
+                # If the cache grows too much, just start over.
+                cache.clear()
+            cache[key] = value
+            return value
+
+    return inner
+
+
class UnionMeta(TypingMeta):
    """Metaclass for Union; forbids subclassing the construct."""

    def __new__(cls, name, bases, namespace):
        cls.assert_no_subclassing(bases)
        new_type = super(UnionMeta, cls).__new__(cls, name, bases, namespace)
        return new_type
+
+
class _Union(_FinalTypingBase):
    """Union type; Union[X, Y] means either X or Y.

    To define a union, use e.g. Union[int, str].  Details:

    - The arguments must be types and there must be at least one.

    - None as an argument is a special case and is replaced by
      type(None).

    - Unions of unions are flattened, e.g.::

        Union[Union[int, str], float] == Union[int, str, float]

    - Unions of a single argument vanish, e.g.::

        Union[int] == int  # The constructor actually returns int

    - Redundant arguments are skipped, e.g.::

        Union[int, str, int] == Union[int, str]

    - When comparing unions, the argument order is ignored, e.g.::

        Union[int, str] == Union[str, int]

    - When two arguments have a subclass relationship, the least
      derived argument is kept, e.g.::

        class Employee: pass
        class Manager(Employee): pass
        Union[int, Employee, Manager] == Union[int, Employee]
        Union[Manager, int, Employee] == Union[int, Employee]
        Union[Employee, Manager] == Employee

    - Similar for object::

        Union[int, object] == object

    - You cannot subclass or instantiate a union.

    - You can use Optional[X] as a shorthand for Union[X, None].
    """

    __metaclass__ = UnionMeta
    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')

    def __new__(cls, parameters=None, origin=None, *args, **kwds):
        # NOTE: may return something other than a _Union instance -- a
        # single-type union collapses to that type (see below).
        self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
        if origin is None:
            # The bare, unsubscripted Union sentinel.
            self.__parameters__ = None
            self.__args__ = None
            self.__origin__ = None
            self.__tree_hash__ = hash(frozenset(('Union',)))
            return self
        if not isinstance(parameters, tuple):
            raise TypeError("Expected parameters=<tuple>")
        if origin is Union:
            parameters = _remove_dups_flatten(parameters)
            # It's not a union if there's only one type left.
            if len(parameters) == 1:
                return parameters[0]
        self.__parameters__ = _type_vars(parameters)
        self.__args__ = parameters
        self.__origin__ = origin
        # Pre-calculate the __hash__ on instantiation.
        # This improves speed for complex substitutions.
        subs_tree = self._subs_tree()
        if isinstance(subs_tree, tuple):
            self.__tree_hash__ = hash(frozenset(subs_tree))
        else:
            self.__tree_hash__ = hash(subs_tree)
        return self

    def _eval_type(self, globalns, localns):
        # Resolve forward references in all arguments and the origin.
        if self.__args__ is None:
            return self
        ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
        ev_origin = _eval_type(self.__origin__, globalns, localns)
        if ev_args == self.__args__ and ev_origin == self.__origin__:
            # Everything is already evaluated.
            return self
        return self.__class__(ev_args, ev_origin, _root=True)

    def _get_type_vars(self, tvars):
        if self.__origin__ and self.__parameters__:
            _get_type_vars(self.__parameters__, tvars)

    def __repr__(self):
        if self.__origin__ is None:
            return super(_Union, self).__repr__()
        tree = self._subs_tree()
        if not isinstance(tree, tuple):
            # The union collapsed to a single type.
            return repr(tree)
        return tree[0]._tree_repr(tree)

    def _tree_repr(self, tree):
        # Render a substitution tree (see _subs_tree) as Union[...].
        arg_list = []
        for arg in tree[1:]:
            if not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)

    @_tp_cache
    def __getitem__(self, parameters):
        if parameters == ():
            raise TypeError("Cannot take a Union of no types.")
        if not isinstance(parameters, tuple):
            parameters = (parameters,)
        if self.__origin__ is None:
            msg = "Union[arg, ...]: each arg must be a type."
        else:
            msg = "Parameters to generic types must be types."
        parameters = tuple(_type_check(p, msg) for p in parameters)
        if self is not Union:
            _check_generic(self, parameters)
        return self.__class__(parameters, origin=self, _root=True)

    def _subs_tree(self, tvars=None, args=None):
        if self is Union:
            return Union  # Nothing to substitute
        tree_args = _subs_tree(self, tvars, args)
        tree_args = _remove_dups_flatten(tree_args)
        if len(tree_args) == 1:
            return tree_args[0]  # Union of a single type is that type
        return (Union,) + tree_args

    def __eq__(self, other):
        if not isinstance(other, _Union):
            # Compare against a substitution tree or a plain type.
            return self._subs_tree() == other
        return self.__tree_hash__ == other.__tree_hash__

    def __hash__(self):
        return self.__tree_hash__

    def __instancecheck__(self, obj):
        raise TypeError("Unions cannot be used with isinstance().")

    def __subclasscheck__(self, cls):
        raise TypeError("Unions cannot be used with issubclass().")


# The public singleton; subscript it as Union[X, Y, ...].
Union = _Union(_root=True)
+
+
class OptionalMeta(TypingMeta):
    """Metaclass for Optional; forbids subclassing the construct."""

    def __new__(cls, name, bases, namespace):
        cls.assert_no_subclassing(bases)
        new_type = super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
        return new_type
+
+
class _Optional(_FinalTypingBase):
    """The Optional construct.

    Optional[X] is simply shorthand for Union[X, None].
    """

    __metaclass__ = OptionalMeta
    __slots__ = ()

    @_tp_cache
    def __getitem__(self, arg):
        # Validate the single argument, then delegate to Union.
        checked = _type_check(arg, "Optional[t] requires a single type.")
        return Union[checked, type(None)]


Optional = _Optional(_root=True)
+
+
def _gorg(a):
    """Return the farthest origin of a generic class (internal helper)."""
    assert isinstance(a, GenericMeta)
    current = a
    while current.__origin__ is not None:
        current = current.__origin__
    return current
+
+
def _geqv(a, b):
    """Return whether two generic classes are equivalent (internal helper).

    The intention is to consider generic class X and any of its
    parameterized forms (X[T], X[int], etc.) as equivalent.

    However, X is not equivalent to a subclass of X.

    The relation is reflexive, symmetric and transitive.
    """
    assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
    # Two generics are equivalent iff they share the same farthest origin.
    origin_a = _gorg(a)
    origin_b = _gorg(b)
    return origin_a is origin_b
+
+
def _next_in_mro(cls):
    """Helper for Generic.__new__.

    Returns the class after the last occurrence of Generic or
    Generic[...] in cls.__mro__.
    """
    result = object
    mro = cls.__mro__
    # Scan for the last Generic/Generic[...]; remember its successor.
    for idx, base in enumerate(mro[:-1]):
        if isinstance(base, GenericMeta) and _gorg(base) is Generic:
            result = mro[idx + 1]
    return result
+
+
def _valid_for_check(cls):
    """An internal helper to prohibit isinstance([1], List[str]) etc."""
    if cls is Generic:
        raise TypeError("Class %r cannot be used with class "
                        "or instance checks" % cls)
    # sys._getframe(3) inspects the caller of the subclass hook: the abc
    # and functools machinery are allowed to check parameterized generics
    # internally, but user code is not.  The frame depth is load-bearing;
    # do not refactor this into helper calls.
    if (cls.__origin__ is not None and
        sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
        raise TypeError("Parameterized generics cannot be used with class "
                        "or instance checks")
+
+
def _make_subclasshook(cls):
    """Construct a __subclasshook__ callable that incorporates
    the associated __extra__ class in subclass checks performed
    against cls.
    """
    if isinstance(cls.__extra__, abc.ABCMeta):
        # The logic mirrors that of ABCMeta.__subclasscheck__.
        # Registered classes need not be checked here because
        # cls and its extra share the same _abc_registry.
        def __extrahook__(cls, subclass):
            _valid_for_check(cls)
            # First give the extra's own hook a chance to decide.
            res = cls.__extra__.__subclasshook__(subclass)
            if res is not NotImplemented:
                return res
            # Direct inheritance from the extra counts as a subclass.
            if cls.__extra__ in getattr(subclass, '__mro__', ()):
                return True
            # Otherwise check the extra's real (non-generic) subclasses.
            for scls in cls.__extra__.__subclasses__():
                if isinstance(scls, GenericMeta):
                    continue
                if issubclass(subclass, scls):
                    return True
            return NotImplemented
    else:
        # For non-ABC extras we'll just call issubclass().
        def __extrahook__(cls, subclass):
            _valid_for_check(cls)
            if cls.__extra__ and issubclass(subclass, cls.__extra__):
                return True
            return NotImplemented
    return classmethod(__extrahook__)
+
+
class GenericMeta(TypingMeta, abc.ABCMeta):
    """Metaclass for generic types."""

    def __new__(cls, name, bases, namespace,
                tvars=None, args=None, origin=None, extra=None, orig_bases=None):
        # The keyword arguments are used internally: __getitem__ below
        # passes them to build parameterized copies of a generic class.
        # A plain class statement always calls with all of them None.
        if tvars is not None:
            # Called from __getitem__() below.
            assert origin is not None
            assert all(isinstance(t, TypeVar) for t in tvars), tvars
        else:
            # Called from class statement.
            assert tvars is None, tvars
            assert args is None, args
            assert origin is None, origin

            # Get the full set of tvars from the bases.
            tvars = _type_vars(bases)
            # Look for Generic[T1, ..., Tn].
            # If found, tvars must be a subset of it.
            # If not found, tvars is it.
            # Also check for and reject plain Generic,
            # and reject multiple Generic[...].
            gvars = None
            for base in bases:
                if base is Generic:
                    raise TypeError("Cannot inherit from plain Generic")
                if (isinstance(base, GenericMeta) and
                        base.__origin__ is Generic):
                    if gvars is not None:
                        raise TypeError(
                            "Cannot inherit from Generic[...] multiple types.")
                    gvars = base.__parameters__
            if gvars is None:
                gvars = tvars
            else:
                tvarset = set(tvars)
                gvarset = set(gvars)
                if not tvarset <= gvarset:
                    raise TypeError(
                        "Some type variables (%s) "
                        "are not listed in Generic[%s]" %
                        (", ".join(str(t) for t in tvars if t not in gvarset),
                         ", ".join(str(g) for g in gvars)))
                tvars = gvars

        initial_bases = bases
        # __extra__ is the concrete (e.g. collections.abc) counterpart
        # backing this generic; fold it into the bases if it is an ABC.
        if extra is None:
            extra = namespace.get('__extra__')
        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
            bases = (extra,) + bases
        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)

        # remove bare Generic from bases if there are other generic bases
        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
            bases = tuple(b for b in bases if b is not Generic)
        self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)

        self.__parameters__ = tvars
        # Be prepared that GenericMeta will be subclassed by TupleMeta
        # and CallableMeta, those two allow ..., (), or [] in __args___.
        self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
                              () if a is _TypingEmpty else
                              a for a in args) if args else None
        self.__origin__ = origin
        self.__extra__ = extra
        # Speed hack (https://github.com/python/typing/issues/196).
        self.__next_in_mro__ = _next_in_mro(self)
        # Preserve base classes on subclassing (__bases__ are type erased now).
        if orig_bases is None:
            self.__orig_bases__ = initial_bases

        # This allows unparameterized generic collections to be used
        # with issubclass() and isinstance() in the same way as their
        # collections.abc counterparts (e.g., isinstance([], Iterable)).
        if ('__subclasshook__' not in namespace and extra  # allow overriding
            or hasattr(self.__subclasshook__, '__name__') and
            self.__subclasshook__.__name__ == '__extrahook__'):
            self.__subclasshook__ = _make_subclasshook(self)

        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
            self.__qualname__ = origin.__qualname__
        # Pre-compute the tree hash used by __eq__/__hash__.
        self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
        return self

    def __init__(self, *args, **kwargs):
        super(GenericMeta, self).__init__(*args, **kwargs)
        # Share the ABC registry with the concrete counterpart so that
        # registrations on either side are visible to both.
        if isinstance(self.__extra__, abc.ABCMeta):
            self._abc_registry = self.__extra__._abc_registry

    def _get_type_vars(self, tvars):
        # Only parameterized generics contribute type variables.
        if self.__origin__ and self.__parameters__:
            _get_type_vars(self.__parameters__, tvars)

    def _eval_type(self, globalns, localns):
        # Resolve forward references in origin and args; rebuild only if
        # something actually changed.
        ev_origin = (self.__origin__._eval_type(globalns, localns)
                     if self.__origin__ else None)
        ev_args = tuple(_eval_type(a, globalns, localns) for a
                        in self.__args__) if self.__args__ else None
        if ev_origin == self.__origin__ and ev_args == self.__args__:
            return self
        return self.__class__(self.__name__,
                              self.__bases__,
                              dict(self.__dict__),
                              tvars=_type_vars(ev_args) if ev_args else None,
                              args=ev_args,
                              origin=ev_origin,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __repr__(self):
        if self.__origin__ is None:
            return super(GenericMeta, self).__repr__()
        return self._tree_repr(self._subs_tree())

    def _tree_repr(self, tree):
        # Render a substitution tree (see _subs_tree) as Name[args...].
        arg_list = []
        for arg in tree[1:]:
            if arg == ():
                arg_list.append('()')
            elif not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)

    def _subs_tree(self, tvars=None, args=None):
        # An unsubscripted generic is its own tree; otherwise the tree is
        # (farthest-origin, substituted args...).
        if self.__origin__ is None:
            return self
        tree_args = _subs_tree(self, tvars, args)
        return (_gorg(self),) + tuple(tree_args)

    def __eq__(self, other):
        if not isinstance(other, GenericMeta):
            return NotImplemented
        if self.__origin__ is None or other.__origin__ is None:
            # Unsubscripted generics compare by identity only.
            return self is other
        return self.__tree_hash__ == other.__tree_hash__

    def __hash__(self):
        return self.__tree_hash__

    @_tp_cache
    def __getitem__(self, params):
        if not isinstance(params, tuple):
            params = (params,)
        if not params and not _gorg(self) is Tuple:
            raise TypeError(
                "Parameter list to %s[...] cannot be empty" % _qualname(self))
        msg = "Parameters to generic types must be types."
        params = tuple(_type_check(p, msg) for p in params)
        if self is Generic:
            # Generic can only be subscripted with unique type variables.
            if not all(isinstance(p, TypeVar) for p in params):
                raise TypeError(
                    "Parameters to Generic[...] must all be type variables")
            if len(set(params)) != len(params):
                raise TypeError(
                    "Parameters to Generic[...] must all be unique")
            tvars = params
            args = params
        elif self in (Tuple, Callable):
            tvars = _type_vars(params)
            args = params
        elif self is _Protocol:
            # _Protocol is internal, don't check anything.
            tvars = params
            args = params
        elif self.__origin__ in (Generic, _Protocol):
            # Can't subscript Generic[...] or _Protocol[...].
            raise TypeError("Cannot subscript already-subscripted %s" %
                            repr(self))
        else:
            # Subscripting a regular Generic subclass.
            _check_generic(self, params)
            tvars = _type_vars(params)
            args = params
        return self.__class__(self.__name__,
                              self.__bases__,
                              dict(self.__dict__),
                              tvars=tvars,
                              args=args,
                              origin=self,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __instancecheck__(self, instance):
        # Since we extend ABC.__subclasscheck__ and
        # ABC.__instancecheck__ inlines the cache checking done by the
        # latter, we must extend __instancecheck__ too. For simplicity
        # we just skip the cache check -- instance checks for generic
        # classes are supposed to be rare anyways.
        if not isinstance(instance, type):
            return issubclass(instance.__class__, self)
        return False

    def __copy__(self):
        # Rebuild an equal class object; all identity lives in these fields.
        return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
                              self.__parameters__, self.__args__, self.__origin__,
                              self.__extra__, self.__orig_bases__)
+
+
# Prevent checks for Generic to crash when defining Generic.
# (Forward declaration: the real Generic class below rebinds this name;
# until then, `base is Generic` / `_gorg(c) is Generic` tests are inert.)
Generic = None
+
+
+def _generic_new(base_cls, cls, *args, **kwds):
+    # Assure type is erased on instantiation,
+    # but attempt to store it in __orig_class__
+    if cls.__origin__ is None:
+        return base_cls.__new__(cls)
+    else:
+        origin = _gorg(cls)
+        obj = base_cls.__new__(origin)
+        try:
+            obj.__orig_class__ = cls
+        except AttributeError:
+            pass
+        obj.__init__(*args, **kwds)
+        return obj
+
+
class Generic(object):
    """Abstract base class for generic types.

    A generic type is typically declared by inheriting from
    this class parameterized with one or more type variables.
    For example, a generic mapping type might be defined as::

      class Mapping(Generic[KT, VT]):
          def __getitem__(self, key: KT) -> VT:
              ...
          # Etc.

    This class can then be used as follows::

      def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
          try:
              return mapping[key]
          except KeyError:
              return default
    """

    __metaclass__ = GenericMeta
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Generic itself (and its parameterized forms) must not be
        # instantiated; only concrete subclasses may be.
        if _geqv(cls, Generic):
            raise TypeError("Type Generic cannot be instantiated; "
                            "it can be used only as a base class")
        # Instantiate with type parameters erased (see _generic_new).
        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty(object):
+    """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+    to allow empty list/tuple in specific places, without allowing them
+    to sneak in where prohibited.
+    """
+
+
+class _TypingEllipsis(object):
+    """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+    """Metaclass for Tuple (internal)."""
+
+    @_tp_cache
+    def __getitem__(self, parameters):
+        if self.__origin__ is not None or not _geqv(self, Tuple):
+            # Normal generic rules apply if this is not the first subscription
+            # or a subscription of a subclass.
+            return super(TupleMeta, self).__getitem__(parameters)
+        if parameters == ():
+            return super(TupleMeta, self).__getitem__((_TypingEmpty,))
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        if len(parameters) == 2 and parameters[1] is Ellipsis:
+            msg = "Tuple[t, ...]: t must be a type."
+            p = _type_check(parameters[0], msg)
+            return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
+        msg = "Tuple[t0, t1, ...]: each t must be a type."
+        parameters = tuple(_type_check(p, msg) for p in parameters)
+        return super(TupleMeta, self).__getitem__(parameters)
+
+    def __instancecheck__(self, obj):
+        if self.__args__ == None:
+            return isinstance(obj, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        if self.__args__ == None:
+            return issubclass(cls, tuple)
+        raise TypeError("Parameterized Tuple cannot be used "
+                        "with issubclass().")
+
+
+class Tuple(tuple):
+    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+    Example: Tuple[T1, T2] is a tuple of two elements corresponding
+    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
+    of an int, a float and a string.
+
+    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+    """
+
+    __metaclass__ = TupleMeta
+    __extra__ = tuple
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Tuple):
+            raise TypeError("Type Tuple cannot be instantiated; "
+                            "use tuple() instead")
+        return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+    """ Metaclass for Callable."""
+
+    def __repr__(self):
+        if self.__origin__ is None:
+            return super(CallableMeta, self).__repr__()
+        return self._tree_repr(self._subs_tree())
+
+    def _tree_repr(self, tree):
+        if _gorg(self) is not Callable:
+            return super(CallableMeta, self)._tree_repr(tree)
+        # For actual Callable (not its subclass) we override
+        # super(CallableMeta, self)._tree_repr() for nice formatting.
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        if arg_list[0] == '...':
+            return repr(tree[0]) + '[..., %s]' % arg_list[1]
+        return (repr(tree[0]) +
+                '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+    def __getitem__(self, parameters):
+        """A thin wrapper around __getitem_inner__ to provide the latter
+        with hashable arguments to improve speed.
+        """
+
+        if  self.__origin__ is not None or not _geqv(self, Callable):
+            return super(CallableMeta, self).__getitem__(parameters)
+        if not isinstance(parameters, tuple) or len(parameters) != 2:
+            raise TypeError("Callable must be used as "
+                            "Callable[[arg, ...], result].")
+        args, result = parameters
+        if args is Ellipsis:
+            parameters = (Ellipsis, result)
+        else:
+            if not isinstance(args, list):
+                raise TypeError("Callable[args, result]: args must be a list."
+                                " Got %.100r." % (args,))
+            parameters = (tuple(args), result)
+        return self.__getitem_inner__(parameters)
+
+    @_tp_cache
+    def __getitem_inner__(self, parameters):
+        args, result = parameters
+        msg = "Callable[args, result]: result must be a type."
+        result = _type_check(result, msg)
+        if args is Ellipsis:
+            return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
+        msg = "Callable[[arg, ...], result]: each arg must be a type."
+        args = tuple(_type_check(arg, msg) for arg in args)
+        parameters = args + (result,)
+        return super(CallableMeta, self).__getitem__(parameters)
+
+
+class Callable(object):
+    """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+    The subscription syntax must always be used with exactly two
+    values: the argument list and the return type.  The argument list
+    must be a list of types or ellipsis; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments,
+    such function types are rarely used as callback types.
+    """
+
+    __metaclass__ = CallableMeta
+    __extra__ = collections_abc.Callable
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Callable):
+            raise TypeError("Type Callable cannot be instantiated; "
+                            "use a non-abstract subclass instead")
+        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+def cast(typ, val):
+    """Cast a value to a type.
+
+    This returns the value unchanged.  To the type checker this
+    signals that the return value has the designated type, but at
+    runtime we intentionally don't check anything (we want this
+    to be as fast as possible).
+    """
+    return val
+
+
+def _get_defaults(func):
+    """Internal helper to extract the default arguments, by name.
+
+    Returns a dict mapping parameter name to default value, merging
+    keyword-only defaults (func.__kwdefaults__) with positional
+    defaults (func.__defaults__).
+    """
+    code = func.__code__
+    pos_count = code.co_argcount
+    arg_names = code.co_varnames
+    # co_varnames lists the arguments first, then locals; keep args only.
+    arg_names = arg_names[:pos_count]
+    defaults = func.__defaults__ or ()
+    kwdefaults = func.__kwdefaults__
+    res = dict(kwdefaults) if kwdefaults else {}
+    # Positional defaults align with the *last* len(defaults) arguments.
+    pos_offset = pos_count - len(defaults)
+    for name, value in zip(arg_names[pos_offset:], defaults):
+        assert name not in res
+        res[name] = value
+    return res
+
+
+def get_type_hints(obj, globalns=None, localns=None):
+    """In Python 2 this is not supported and always returns None."""
+    return None
+
+
+def no_type_check(arg):
+    """Decorator to indicate that annotations are not type hints.
+
+    The argument must be a class or function; if it is a class, it
+    applies recursively to all methods and classes defined in that class
+    (but not to methods defined in its superclasses or subclasses).
+
+    This mutates the function(s) or class(es) in place.
+    """
+    if isinstance(arg, type):
+        # Copy the class dict so entries can be dropped while filtering.
+        arg_attrs = arg.__dict__.copy()
+        for attr, val in arg.__dict__.items():
+            # Drop attributes whose value is one of the base classes, so
+            # the recursion below does not descend into superclasses.
+            if val in arg.__bases__:
+                arg_attrs.pop(attr)
+        for obj in arg_attrs.values():
+            if isinstance(obj, types.FunctionType):
+                obj.__no_type_check__ = True
+            if isinstance(obj, type):
+                # Recurse into classes defined inside this class.
+                no_type_check(obj)
+    try:
+        arg.__no_type_check__ = True
+    except TypeError: # built-in classes
+        pass
+    return arg
+
+
+def no_type_check_decorator(decorator):
+    """Decorator to give another decorator the @no_type_check effect.
+
+    This wraps the decorator with something that wraps the decorated
+    function in @no_type_check.
+    """
+
+    @functools.wraps(decorator)
+    def wrapped_decorator(*args, **kwds):
+        func = decorator(*args, **kwds)
+        func = no_type_check(func)
+        return func
+
+    return wrapped_decorator
+
+
+def _overload_dummy(*args, **kwds):
+    """Helper for @overload to raise when called."""
+    raise NotImplementedError(
+        "You should not call an overloaded function. "
+        "A series of @overload-decorated functions "
+        "outside a stub module should always be followed "
+        "by an implementation that is not @overload-ed.")
+
+
+def overload(func):
+    """Decorator for overloaded functions/methods.
+
+    In a stub file, place two or more stub definitions for the same
+    function in a row, each decorated with @overload.  For example:
+
+      @overload
+      def utf8(value: None) -> None: ...
+      @overload
+      def utf8(value: bytes) -> bytes: ...
+      @overload
+      def utf8(value: str) -> bytes: ...
+
+    In a non-stub file (i.e. a regular .py file), do the same but
+    follow it with an implementation.  The implementation should *not*
+    be decorated with @overload.  For example:
+
+      @overload
+      def utf8(value: None) -> None: ...
+      @overload
+      def utf8(value: bytes) -> bytes: ...
+      @overload
+      def utf8(value: str) -> bytes: ...
+      def utf8(value):
+          # implementation goes here
+    """
+    return _overload_dummy
+
+
+class _ProtocolMeta(GenericMeta):
+    """Internal metaclass for _Protocol.
+
+    This exists so _Protocol classes can be generic without deriving
+    from Generic.
+    """
+
+    def __instancecheck__(self, obj):
+        if _Protocol not in self.__bases__:
+            return super(_ProtocolMeta, self).__instancecheck__(obj)
+        raise TypeError("Protocols cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        if not self._is_protocol:
+            # No structural checks since this isn't a protocol.
+            return NotImplemented
+
+        if self is _Protocol:
+            # Every class is a subclass of the empty protocol.
+            return True
+
+        # Find all attributes defined in the protocol.
+        attrs = self._get_protocol_attrs()
+
+        for attr in attrs:
+            if not any(attr in d.__dict__ for d in cls.__mro__):
+                return False
+        return True
+
+    def _get_protocol_attrs(self):
+        # Get all Protocol base classes.
+        protocol_bases = []
+        for c in self.__mro__:
+            if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
+                protocol_bases.append(c)
+
+        # Get attributes included in protocol.
+        attrs = set()
+        for base in protocol_bases:
+            for attr in base.__dict__.keys():
+                # Include attributes not defined in any non-protocol bases.
+                for c in self.__mro__:
+                    if (c is not base and attr in c.__dict__ and
+                            not getattr(c, '_is_protocol', False)):
+                        break
+                else:
+                    if (not attr.startswith('_abc_') and
+                            attr != '__abstractmethods__' and
+                            attr != '_is_protocol' and
+                            attr != '__dict__' and
+                            attr != '__args__' and
+                            attr != '__slots__' and
+                            attr != '_get_protocol_attrs' and
+                            attr != '__next_in_mro__' and
+                            attr != '__parameters__' and
+                            attr != '__origin__' and
+                            attr != '__orig_bases__' and
+                            attr != '__extra__' and
+                            attr != '__tree_hash__' and
+                            attr != '__module__'):
+                        attrs.add(attr)
+
+        return attrs
+
+
+class _Protocol(object):
+    """Internal base class for protocol classes.
+
+    This implements a simple-minded structural issubclass check
+    (similar but more general than the one-offs in collections.abc
+    such as Hashable).
+    """
+
+    __metaclass__ = _ProtocolMeta
+    __slots__ = ()
+
+    _is_protocol = True
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+Hashable = collections_abc.Hashable  # Not generic.
+
+
+class Iterable(Generic[T_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.Iterable
+
+
+class Iterator(Iterable[T_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.Iterator
+
+
+class SupportsInt(_Protocol):
+    __slots__ = ()
+
+    @abstractmethod
+    def __int__(self):
+        pass
+
+
+class SupportsFloat(_Protocol):
+    __slots__ = ()
+
+    @abstractmethod
+    def __float__(self):
+        pass
+
+
+class SupportsComplex(_Protocol):
+    __slots__ = ()
+
+    @abstractmethod
+    def __complex__(self):
+        pass
+
+
+class SupportsAbs(_Protocol[T_co]):
+    __slots__ = ()
+
+    @abstractmethod
+    def __abs__(self):
+        pass
+
+
+if hasattr(collections_abc, 'Reversible'):
+    class Reversible(Iterable[T_co]):
+        __slots__ = ()
+        __extra__ = collections_abc.Reversible
+else:
+    class Reversible(_Protocol[T_co]):
+        __slots__ = ()
+
+        @abstractmethod
+        def __reversed__(self):
+            pass
+
+
+Sized = collections_abc.Sized  # Not generic.
+
+
+class Container(Generic[T_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.Container
+
+
+# Callable was defined earlier.
+
+
+class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.Set
+
+
+class MutableSet(AbstractSet[T]):
+    __slots__ = ()
+    __extra__ = collections_abc.MutableSet
+
+
+# NOTE: It is only covariant in the value type.
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.Mapping
+
+
+class MutableMapping(Mapping[KT, VT]):
+    __slots__ = ()
+    __extra__ = collections_abc.MutableMapping
+
+
+if hasattr(collections_abc, 'Reversible'):
+    class Sequence(Sized, Reversible[T_co], Container[T_co]):
+        __slots__ = ()
+        __extra__ = collections_abc.Sequence
+else:
+    class Sequence(Sized, Iterable[T_co], Container[T_co]):
+        __slots__ = ()
+        __extra__ = collections_abc.Sequence
+
+
+class MutableSequence(Sequence[T]):
+    __slots__ = ()
+    __extra__ = collections_abc.MutableSequence
+
+
+class ByteString(Sequence[int]):
+    pass
+
+
+ByteString.register(str)
+ByteString.register(bytearray)
+
+
+class List(list, MutableSequence[T]):
+    __slots__ = ()
+    __extra__ = list
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, List):
+            raise TypeError("Type List cannot be instantiated; "
+                            "use list() instead")
+        return _generic_new(list, cls, *args, **kwds)
+
+
+class Set(set, MutableSet[T]):
+    __slots__ = ()
+    __extra__ = set
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Set):
+            raise TypeError("Type Set cannot be instantiated; "
+                            "use set() instead")
+        return _generic_new(set, cls, *args, **kwds)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co]):
+    __slots__ = ()
+    __extra__ = frozenset
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, FrozenSet):
+            raise TypeError("Type FrozenSet cannot be instantiated; "
+                            "use frozenset() instead")
+        return _generic_new(frozenset, cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.MappingView
+
+
+class KeysView(MappingView[KT], AbstractSet[KT]):
+    __slots__ = ()
+    __extra__ = collections_abc.KeysView
+
+
+class ItemsView(MappingView[Tuple[KT, VT_co]],
+                AbstractSet[Tuple[KT, VT_co]],
+                Generic[KT, VT_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.ItemsView
+
+
+class ValuesView(MappingView[VT_co]):
+    __slots__ = ()
+    __extra__ = collections_abc.ValuesView
+
+
+class Dict(dict, MutableMapping[KT, VT]):
+    __slots__ = ()
+    __extra__ = dict
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Dict):
+            raise TypeError("Type Dict cannot be instantiated; "
+                            "use dict() instead")
+        return _generic_new(dict, cls, *args, **kwds)
+
+
+class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
+    __slots__ = ()
+    __extra__ = collections.defaultdict
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, DefaultDict):
+            raise TypeError("Type DefaultDict cannot be instantiated; "
+                            "use collections.defaultdict() instead")
+        return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+    # Sufficiently recent versions of 3.5 have a Generator ABC.
+    _G_base = collections_abc.Generator
+else:
+    # Fall back on the exact type.
+    _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
+    __slots__ = ()
+    __extra__ = _G_base
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Generator):
+            raise TypeError("Type Generator cannot be instantiated; "
+                            "create a subclass instead")
+        return _generic_new(_G_base, cls, *args, **kwds)
+
+
+# Internal type variable used for Type[].
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
+
+
+# This is not a real generic class.  Don't use outside annotations.
+class Type(Generic[CT_co]):
+    """A special construct usable to annotate class objects.
+
+    For example, suppose we have the following classes::
+
+      class User: ...  # Abstract base for User classes
+      class BasicUser(User): ...
+      class ProUser(User): ...
+      class TeamUser(User): ...
+
+    And a function that takes a class argument that's a subclass of
+    User and returns an instance of the corresponding class::
+
+      U = TypeVar('U', bound=User)
+      def new_user(user_class: Type[U]) -> U:
+          user = user_class()
+          # (Here we could write the user object to a database)
+          return user
+
+      joe = new_user(BasicUser)
+
+    At this point the type checker knows that joe has type BasicUser.
+    """
+    __slots__ = ()
+    __extra__ = type
+
+
+def NamedTuple(typename, fields):
+    """Typed version of namedtuple.
+
+    Usage::
+
+        Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
+
+    This is equivalent to::
+
+        Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+    The resulting class has one extra attribute: _field_types,
+    giving a dict mapping field names to types.  (The field names
+    are in the _fields attribute, which is part of the namedtuple
+    API.)
+    """
+    # Materialize the (name, type) pairs; accepts any iterable of pairs.
+    fields = [(n, t) for n, t in fields]
+    cls = collections.namedtuple(typename, [n for n, t in fields])
+    cls._field_types = dict(fields)
+    # Set the module to the caller's module (otherwise it'd be 'typing').
+    try:
+        cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):
+        # _getframe may be unavailable on some Python implementations.
+        pass
+    return cls
+
+
+def NewType(name, tp):
+    """NewType creates simple unique types with almost zero
+    runtime overhead. NewType(name, tp) is considered a subtype of tp
+    by static type checkers. At runtime, NewType(name, tp) returns
+    a dummy function that simply returns its argument. Usage::
+
+        UserId = NewType('UserId', int)
+
+        def name_by_id(user_id):
+            # type: (UserId) -> str
+            ...
+
+        UserId('user')          # Fails type check
+
+        name_by_id(42)          # Fails type check
+        name_by_id(UserId(42))  # OK
+
+        num = UserId(5) + 1     # type: int
+    """
+
+    def new_type(x):
+        return x
+
+    # Some versions of Python 2 complain because of making all strings unicode
+    new_type.__name__ = str(name)
+    new_type.__supertype__ = tp
+    return new_type
+
+
+# Python-version-specific alias (Python 2: unicode; Python 3: str)
+Text = unicode
+
+
+# Constant that's True when type checking, but False here.
+TYPE_CHECKING = False
+
+
+class IO(Generic[AnyStr]):
+    """Generic base class for TextIO and BinaryIO.
+
+    This is an abstract, generic version of the return of open().
+
+    NOTE: This does not distinguish between the different possible
+    classes (text vs. binary, read vs. write vs. read/write,
+    append-only, unbuffered).  The TextIO and BinaryIO subclasses
+    below capture the distinctions between text vs. binary, which is
+    pervasive in the interface; however we currently do not offer a
+    way to track the other distinctions in the type system.
+    """
+
+    __slots__ = ()
+
+    @abstractproperty
+    def mode(self):
+        pass
+
+    @abstractproperty
+    def name(self):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+    @abstractmethod
+    def closed(self):
+        pass
+
+    @abstractmethod
+    def fileno(self):
+        pass
+
+    @abstractmethod
+    def flush(self):
+        pass
+
+    @abstractmethod
+    def isatty(self):
+        pass
+
+    @abstractmethod
+    def read(self, n = -1):
+        pass
+
+    @abstractmethod
+    def readable(self):
+        pass
+
+    @abstractmethod
+    def readline(self, limit = -1):
+        pass
+
+    @abstractmethod
+    def readlines(self, hint = -1):
+        pass
+
+    @abstractmethod
+    def seek(self, offset, whence = 0):
+        pass
+
+    @abstractmethod
+    def seekable(self):
+        pass
+
+    @abstractmethod
+    def tell(self):
+        pass
+
+    @abstractmethod
+    def truncate(self, size = None):
+        pass
+
+    @abstractmethod
+    def writable(self):
+        pass
+
+    @abstractmethod
+    def write(self, s):
+        pass
+
+    @abstractmethod
+    def writelines(self, lines):
+        pass
+
+    @abstractmethod
+    def __enter__(self):
+        pass
+
+    @abstractmethod
+    def __exit__(self, type, value, traceback):
+        pass
+
+
+class BinaryIO(IO[bytes]):
+    """Typed version of the return of open() in binary mode."""
+
+    __slots__ = ()
+
+    @abstractmethod
+    def write(self, s):
+        pass
+
+    @abstractmethod
+    def __enter__(self):
+        pass
+
+
+class TextIO(IO[unicode]):
+    """Typed version of the return of open() in text mode."""
+
+    __slots__ = ()
+
+    @abstractproperty
+    def buffer(self):
+        pass
+
+    @abstractproperty
+    def encoding(self):
+        pass
+
+    @abstractproperty
+    def errors(self):
+        pass
+
+    @abstractproperty
+    def line_buffering(self):
+        pass
+
+    @abstractproperty
+    def newlines(self):
+        pass
+
+    @abstractmethod
+    def __enter__(self):
+        pass
+
+
+class io(object):
+    """Wrapper namespace for IO generic classes."""
+
+    __all__ = ['IO', 'TextIO', 'BinaryIO']
+    IO = IO
+    TextIO = TextIO
+    BinaryIO = BinaryIO
+
+io.__name__ = __name__ + b'.io'
+sys.modules[io.__name__] = io
+
+
+Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
+                     lambda p: p.pattern)
+Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
+                   lambda m: m.re.pattern)
+
+
+class re(object):
+    """Wrapper namespace for re type aliases."""
+
+    __all__ = ['Pattern', 'Match']
+    Pattern = Pattern
+    Match = Match
+
+re.__name__ = __name__ + b'.re'
+sys.modules[re.__name__] = re
diff --git a/lib-typing/3.2/test_typing.py b/lib-typing/3.2/test_typing.py
new file mode 100644
index 0000000..d203ce3
--- /dev/null
+++ b/lib-typing/3.2/test_typing.py
@@ -0,0 +1,2063 @@
+import contextlib
+import collections
+import pickle
+import re
+import sys
+from unittest import TestCase, main, skipUnless, SkipTest
+from copy import copy, deepcopy
+
+from typing import Any
+from typing import TypeVar, AnyStr
+from typing import T, KT, VT  # Not in __all__.
+from typing import Union, Optional
+from typing import Tuple, List, MutableMapping
+from typing import Callable
+from typing import Generic, ClassVar
+from typing import cast
+from typing import get_type_hints
+from typing import no_type_check, no_type_check_decorator
+from typing import Type
+from typing import NewType
+from typing import NamedTuple
+from typing import IO, TextIO, BinaryIO
+from typing import Pattern, Match
+import abc
+import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
+
+
+class BaseTestCase(TestCase):
+    """Shared helpers for the typing test cases."""
+
+    def assertIsSubclass(self, cls, class_or_tuple, msg=None):
+        # Assert issubclass() holds, with an informative failure message.
+        if not issubclass(cls, class_or_tuple):
+            message = '%r is not a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+    def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
+        # Inverse of assertIsSubclass.
+        if issubclass(cls, class_or_tuple):
+            message = '%r is a subclass of %r' % (cls, class_or_tuple)
+            if msg is not None:
+                message += ' : %s' % msg
+            raise self.failureException(message)
+
+    def clear_caches(self):
+        # Run the typing module's registered cache-clearing callbacks.
+        for f in typing._cleanups:
+            f()
+
+
+class Employee:
+    pass
+
+
+class Manager(Employee):
+    pass
+
+
+class Founder(Employee):
+    pass
+
+
+class ManagingFounder(Manager, Founder):
+    pass
+
+
+class AnyTests(BaseTestCase):
+
+    def test_any_instance_type_error(self):
+        with self.assertRaises(TypeError):
+            isinstance(42, Any)
+
+    def test_any_subclass_type_error(self):
+        with self.assertRaises(TypeError):
+            issubclass(Employee, Any)
+        with self.assertRaises(TypeError):
+            issubclass(Any, Employee)
+
+    def test_repr(self):
+        self.assertEqual(repr(Any), 'typing.Any')
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            issubclass(42, Any)
+        with self.assertRaises(TypeError):
+            Any[int]  # Any is not a generic type.
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError):
+            class A(Any):
+                pass
+        with self.assertRaises(TypeError):
+            class A(type(Any)):
+                pass
+
+    def test_cannot_instantiate(self):
+        with self.assertRaises(TypeError):
+            Any()
+        with self.assertRaises(TypeError):
+            type(Any)()
+
+    def test_cannot_subscript(self):
+        with self.assertRaises(TypeError):
+            Any[int]
+
+    def test_any_works_with_alias(self):
+        # These expressions must simply not fail.
+        typing.Match[Any]
+        typing.Pattern[Any]
+        typing.IO[Any]
+
+
+class TypeVarTests(BaseTestCase):
+
+    def test_basic_plain(self):
+        T = TypeVar('T')
+        # T equals itself.
+        self.assertEqual(T, T)
+        # T is an instance of TypeVar
+        self.assertIsInstance(T, TypeVar)
+
+    def test_typevar_instance_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            isinstance(42, T)
+
+    def test_typevar_subclass_type_error(self):
+        T = TypeVar('T')
+        with self.assertRaises(TypeError):
+            issubclass(int, T)
+        with self.assertRaises(TypeError):
+            issubclass(T, int)
+
+    def test_constrained_error(self):
+        with self.assertRaises(TypeError):
+            X = TypeVar('X', int)
+            X
+
+    def test_union_unique(self):
+        X = TypeVar('X')
+        Y = TypeVar('Y')
+        self.assertNotEqual(X, Y)
+        self.assertEqual(Union[X], X)
+        self.assertNotEqual(Union[X], Union[X, Y])
+        self.assertEqual(Union[X, X], X)
+        self.assertNotEqual(Union[X, int], Union[X])
+        self.assertNotEqual(Union[X, int], Union[int])
+        self.assertEqual(Union[X, int].__args__, (X, int))
+        self.assertEqual(Union[X, int].__parameters__, (X,))
+        self.assertIs(Union[X, int].__origin__, Union)
+
+    def test_union_constrained(self):
+        A = TypeVar('A', str, bytes)
+        self.assertNotEqual(Union[A, str], Union[A])
+
+    def test_repr(self):
+        self.assertEqual(repr(T), '~T')
+        self.assertEqual(repr(KT), '~KT')
+        self.assertEqual(repr(VT), '~VT')
+        self.assertEqual(repr(AnyStr), '~AnyStr')
+        T_co = TypeVar('T_co', covariant=True)
+        self.assertEqual(repr(T_co), '+T_co')
+        T_contra = TypeVar('T_contra', contravariant=True)
+        self.assertEqual(repr(T_contra), '-T_contra')
+
+    def test_no_redefinition(self):
+        self.assertNotEqual(TypeVar('T'), TypeVar('T'))
+        self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
+
+    def test_cannot_subclass_vars(self):
+        with self.assertRaises(TypeError):
+            class V(TypeVar('T')):
+                pass
+
+    def test_cannot_subclass_var_itself(self):
+        with self.assertRaises(TypeError):
+            class V(TypeVar):
+                pass
+
+    def test_cannot_instantiate_vars(self):
+        with self.assertRaises(TypeError):
+            TypeVar('A')()
+
+    def test_bound_errors(self):
+        with self.assertRaises(TypeError):
+            TypeVar('X', bound=42)
+        with self.assertRaises(TypeError):
+            TypeVar('X', str, float, bound=Employee)
+
+
class UnionTests(BaseTestCase):
    """Tests for typing.Union: construction, simplification rules,
    equality/ordering semantics, repr, and runtime restrictions
    (no isinstance/issubclass, no subclassing, no instantiation)."""

    def test_basics(self):
        u = Union[int, float]
        self.assertNotEqual(u, Union)

    def test_subclass_error(self):
        # Unions may not participate in issubclass() on either side.
        with self.assertRaises(TypeError):
            issubclass(int, Union)
        with self.assertRaises(TypeError):
            issubclass(Union, int)
        with self.assertRaises(TypeError):
            issubclass(int, Union[int, str])
        with self.assertRaises(TypeError):
            issubclass(Union[int, str], int)

    def test_union_any(self):
        # Any absorbs a union only when it is the sole member.
        u = Union[Any]
        self.assertEqual(u, Any)
        u1 = Union[int, Any]
        u2 = Union[Any, int]
        u3 = Union[Any, object]
        self.assertEqual(u1, u2)
        self.assertNotEqual(u1, Any)
        self.assertNotEqual(u2, Any)
        self.assertNotEqual(u3, Any)

    def test_union_object(self):
        # object is a supertype of everything, so it absorbs other members.
        u = Union[object]
        self.assertEqual(u, object)
        u = Union[int, object]
        self.assertEqual(u, object)
        u = Union[object, int]
        self.assertEqual(u, object)

    def test_unordered(self):
        # Member order does not affect union equality.
        u1 = Union[int, float]
        u2 = Union[float, int]
        self.assertEqual(u1, u2)

    def test_single_class_disappears(self):
        # A one-member union collapses to the member itself.
        t = Union[Employee]
        self.assertIs(t, Employee)

    def test_base_class_disappears(self):
        # A member that is a subclass of another member is dropped.
        u = Union[Employee, Manager, int]
        self.assertEqual(u, Union[int, Employee])
        u = Union[Manager, int, Employee]
        self.assertEqual(u, Union[int, Employee])
        u = Union[Employee, Manager]
        self.assertIs(u, Employee)

    def test_union_union(self):
        # A nested union is flattened into its parent.
        u = Union[int, float]
        v = Union[u, Employee]
        self.assertEqual(v, Union[int, float, Employee])

    def test_repr(self):
        self.assertEqual(repr(Union), 'typing.Union')
        u = Union[Employee, int]
        self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__)
        u = Union[int, Employee]
        self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__)

    def test_cannot_subclass(self):
        with self.assertRaises(TypeError):
            class C(Union):
                pass
        with self.assertRaises(TypeError):
            class C(type(Union)):
                pass
        with self.assertRaises(TypeError):
            class C(Union[int, str]):
                pass

    def test_cannot_instantiate(self):
        # Neither bare Union, its metaclass, nor a subscripted union is callable.
        with self.assertRaises(TypeError):
            Union()
        with self.assertRaises(TypeError):
            type(Union)()
        u = Union[int, float]
        with self.assertRaises(TypeError):
            u()
        with self.assertRaises(TypeError):
            type(u)()

    def test_union_generalization(self):
        self.assertFalse(Union[str, typing.Iterable[int]] == str)
        self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
        # str is an Iterable, so bare Iterable absorbs it.
        self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)

    def test_optional(self):
        # Optional[X] is sugar for Union[X, None].
        o = Optional[int]
        u = Union[int, None]
        self.assertEqual(o, u)

    def test_empty(self):
        with self.assertRaises(TypeError):
            Union[()]

    def test_union_instance_type_error(self):
        with self.assertRaises(TypeError):
            isinstance(42, Union[int, str])

    def test_union_str_pattern(self):
        # Shouldn't crash; see http://bugs.python.org/issue25390
        A = Union[str, Pattern]
        A  # reference to silence "unused variable" warnings

    def test_etree(self):
        # See https://github.com/python/typing/issues/229
        # (Only relevant for Python 2.)
        try:
            from xml.etree.cElementTree import Element
        except ImportError:
            raise SkipTest("cElementTree not found")
        Union[Element, str]  # Shouldn't crash

        def Elem(*args):
            return Element(*args)

        Union[Elem, str]  # Nor should this
+
class TupleTests(BaseTestCase):
    """Tests for typing.Tuple: subclassing relationships with builtin
    tuple, equality of parameterized forms, repr, and runtime checks."""

    def test_basics(self):
        # Parameterized Tuple may not be used with issubclass().
        with self.assertRaises(TypeError):
            issubclass(Tuple, Tuple[int, str])
        with self.assertRaises(TypeError):
            issubclass(tuple, Tuple[int, str])

        # Bare Tuple behaves like builtin tuple for issubclass().
        class TP(tuple): ...
        self.assertTrue(issubclass(tuple, Tuple))
        self.assertTrue(issubclass(TP, Tuple))

    def test_equality(self):
        self.assertEqual(Tuple[int], Tuple[int])
        self.assertEqual(Tuple[int, ...], Tuple[int, ...])
        self.assertNotEqual(Tuple[int], Tuple[int, int])
        # Fixed-length and variadic (...) forms are distinct.
        self.assertNotEqual(Tuple[int], Tuple[int, ...])

    def test_tuple_subclass(self):
        class MyTuple(tuple):
            pass
        self.assertTrue(issubclass(MyTuple, Tuple))

    def test_tuple_instance_type_error(self):
        # isinstance() is allowed against bare Tuple but not Tuple[...].
        with self.assertRaises(TypeError):
            isinstance((0, 0), Tuple[int, int])
        self.assertIsInstance((0, 0), Tuple)

    def test_repr(self):
        self.assertEqual(repr(Tuple), 'typing.Tuple')
        self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]')
        self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]')
        self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]')

    def test_errors(self):
        # Non-class first argument to issubclass() raises TypeError.
        with self.assertRaises(TypeError):
            issubclass(42, Tuple)
        with self.assertRaises(TypeError):
            issubclass(42, Tuple[int])
+
+
class CallableTests(BaseTestCase):
    """Tests for typing.Callable: equality/hashing of parameterized
    forms, repr, runtime isinstance behavior, and invalid subscripts."""

    def test_self_subclass(self):
        # issubclass() works against bare Callable, not parameterized forms.
        with self.assertRaises(TypeError):
            self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
        self.assertTrue(issubclass(type(lambda x: x), Callable))

    def test_eq_hash(self):
        # Equal parameterizations compare and hash equal; any difference
        # in argument list or return type breaks equality.
        self.assertEqual(Callable[[int], int], Callable[[int], int])
        self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1)
        self.assertNotEqual(Callable[[int], int], Callable[[int], str])
        self.assertNotEqual(Callable[[int], int], Callable[[str], int])
        self.assertNotEqual(Callable[[int], int], Callable[[int, int], int])
        self.assertNotEqual(Callable[[int], int], Callable[[], int])
        self.assertNotEqual(Callable[[int], int], Callable)

    def test_cannot_instantiate(self):
        with self.assertRaises(TypeError):
            Callable()
        with self.assertRaises(TypeError):
            type(Callable)()
        c = Callable[[int], str]
        with self.assertRaises(TypeError):
            c()
        with self.assertRaises(TypeError):
            type(c)()

    def test_callable_wrong_forms(self):
        # The argument list must be a list of types or a literal ellipsis.
        with self.assertRaises(TypeError):
            Callable[[...], int]
        with self.assertRaises(TypeError):
            Callable[(), int]
        with self.assertRaises(TypeError):
            Callable[[()], int]
        with self.assertRaises(TypeError):
            Callable[[int, 1], 2]

    def test_callable_instance_works(self):
        # isinstance() against bare Callable is allowed.
        def f():
            pass
        self.assertIsInstance(f, Callable)
        self.assertNotIsInstance(None, Callable)

    def test_callable_instance_type_error(self):
        # isinstance() against a parameterized Callable is rejected.
        def f():
            pass
        with self.assertRaises(TypeError):
            self.assertIsInstance(f, Callable[[], None])
        with self.assertRaises(TypeError):
            self.assertIsInstance(f, Callable[[], Any])
        with self.assertRaises(TypeError):
            self.assertNotIsInstance(None, Callable[[], None])
        with self.assertRaises(TypeError):
            self.assertNotIsInstance(None, Callable[[], Any])

    def test_repr(self):
        ct0 = Callable[[], bool]
        self.assertEqual(repr(ct0), 'typing.Callable[[], bool]')
        ct2 = Callable[[str, float], int]
        self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]')
        ctv = Callable[..., str]
        self.assertEqual(repr(ctv), 'typing.Callable[..., str]')

    def test_callable_with_ellipsis(self):

        def foo(a: Callable[..., T]):
            pass

        self.assertEqual(get_type_hints(foo, globals(), locals()),
                         {'a': Callable[..., T]})

    def test_ellipsis_in_generic(self):
        # Shouldn't crash; see https://github.com/python/typing/issues/259
        typing.List[Callable[..., str]]
+
+
# Module-level type variables used by the SimpleMapping fixtures below:
# XK is constrained to str/bytes keys, XV is an unconstrained value type.
XK = TypeVar('XK', str, bytes)
XV = TypeVar('XV')
+
+
class SimpleMapping(Generic[XK, XV]):
    """Minimal generic mapping interface used as a fixture by the tests.

    The method bodies are intentionally empty stubs (``...``); only the
    generic signatures matter here.
    """

    def __getitem__(self, key: XK) -> XV:
        ...

    def __setitem__(self, key: XK, value: XV):
        ...

    def get(self, key: XK, default: XV = None) -> XV:
        ...
+
+
class MySimpleMapping(SimpleMapping[XK, XV]):
    """Concrete SimpleMapping fixture backed by a plain dict."""

    def __init__(self):
        # Plain dict provides the actual storage.
        self.store = {}

    def __getitem__(self, key: str):
        return self.store[key]

    def __setitem__(self, key: str, value):
        self.store[key] = value

    def get(self, key: str, default=None):
        # dict.get already implements lookup-with-fallback, matching the
        # original try/except-KeyError form.
        return self.store.get(key, default)
+
+
class ProtocolTests(BaseTestCase):
    """Tests for the structural Supports* protocols: issubclass() works
    structurally (via the relevant dunder), isinstance() is rejected."""

    def test_supports_int(self):
        self.assertIsSubclass(int, typing.SupportsInt)
        self.assertNotIsSubclass(str, typing.SupportsInt)

    def test_supports_float(self):
        self.assertIsSubclass(float, typing.SupportsFloat)
        self.assertNotIsSubclass(str, typing.SupportsFloat)

    def test_supports_complex(self):

        # Note: complex itself doesn't have __complex__.
        class C:
            def __complex__(self):
                return 0j

        self.assertIsSubclass(C, typing.SupportsComplex)
        self.assertNotIsSubclass(str, typing.SupportsComplex)

    def test_supports_bytes(self):

        # Note: bytes itself doesn't have __bytes__.
        class B:
            def __bytes__(self):
                return b''

        self.assertIsSubclass(B, typing.SupportsBytes)
        self.assertNotIsSubclass(str, typing.SupportsBytes)

    def test_supports_abs(self):
        self.assertIsSubclass(float, typing.SupportsAbs)
        self.assertIsSubclass(int, typing.SupportsAbs)
        self.assertNotIsSubclass(str, typing.SupportsAbs)

    def test_supports_round(self):
        issubclass(float, typing.SupportsRound)
        self.assertIsSubclass(float, typing.SupportsRound)
        self.assertIsSubclass(int, typing.SupportsRound)
        self.assertNotIsSubclass(str, typing.SupportsRound)

    def test_reversible(self):
        self.assertIsSubclass(list, typing.Reversible)
        self.assertNotIsSubclass(int, typing.Reversible)

    def test_protocol_instance_type_error(self):
        # isinstance() against a protocol is a TypeError, but ordinary
        # nominal subclassing of a protocol still works with isinstance().
        with self.assertRaises(TypeError):
            isinstance(0, typing.SupportsAbs)
        class C1(typing.SupportsInt):
            def __int__(self) -> int:
                return 42
        class C2(C1):
            pass
        c = C2()
        self.assertIsInstance(c, C1)
+
+
class GenericTests(BaseTestCase):
    """Tests for user-defined generic classes (typing.Generic):
    parameterization, __parameters__/__args__/__orig_bases__ bookkeeping,
    repr, MRO interaction with collections.abc, type erasure at
    instantiation, pickling/copying, and __slots__ handling."""

    def test_basics(self):
        # Fully-parameterized aliases take no further subscripts;
        # partially-parameterized ones accept exactly the free variables.
        X = SimpleMapping[str, Any]
        self.assertEqual(X.__parameters__, ())
        with self.assertRaises(TypeError):
            X[str]
        with self.assertRaises(TypeError):
            X[str, str]
        Y = SimpleMapping[XK, str]
        self.assertEqual(Y.__parameters__, (XK,))
        Y[str]
        with self.assertRaises(TypeError):
            Y[str, str]

    def test_generic_errors(self):
        T = TypeVar('T')
        with self.assertRaises(TypeError):
            Generic[T]()
        with self.assertRaises(TypeError):
            isinstance([], List[int])
        with self.assertRaises(TypeError):
            issubclass(list, List[int])

    def test_init(self):
        # Repeated type variables in Generic[...] are rejected.
        T = TypeVar('T')
        S = TypeVar('S')
        with self.assertRaises(TypeError):
            Generic[T, T]
        with self.assertRaises(TypeError):
            Generic[T, S, T]

    def test_repr(self):
        self.assertEqual(repr(SimpleMapping),
                         __name__ + '.' + 'SimpleMapping')
        self.assertEqual(repr(MySimpleMapping),
                         __name__ + '.' + 'MySimpleMapping')

    def test_chain_repr(self):
        # Successive subscription substitutes variables left-to-right.
        T = TypeVar('T')
        S = TypeVar('S')

        class C(Generic[T]):
            pass

        X = C[Tuple[S, T]]
        self.assertEqual(X, C[Tuple[S, T]])
        self.assertNotEqual(X, C[Tuple[T, S]])

        Y = X[T, int]
        self.assertEqual(Y, X[T, int])
        self.assertNotEqual(Y, X[S, int])
        self.assertNotEqual(Y, X[T, str])

        Z = Y[str]
        self.assertEqual(Z, Y[str])
        self.assertNotEqual(Z, Y[int])
        self.assertNotEqual(Z, Y[T])

        self.assertTrue(str(Z).endswith(
            '.C[typing.Tuple[str, int]]'))

    def test_new_repr(self):
        # ~T marks an invariant variable, +U a covariant one in reprs.
        T = TypeVar('T')
        U = TypeVar('U', covariant=True)
        S = TypeVar('S')

        self.assertEqual(repr(List), 'typing.List')
        self.assertEqual(repr(List[T]), 'typing.List[~T]')
        self.assertEqual(repr(List[U]), 'typing.List[+U]')
        self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
        self.assertEqual(repr(List[int]), 'typing.List[int]')

    def test_new_repr_complex(self):
        T = TypeVar('T')
        TS = TypeVar('TS')

        self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
        self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
                         'typing.List[typing.Tuple[int, ~T]]')
        self.assertEqual(repr(List[Tuple[T, T]][List[int]]),
                 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]')

    def test_new_repr_bare(self):
        T = TypeVar('T')
        self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
        self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]')
        class C(typing.Dict[Any, Any]): ...
        # this line should just work
        repr(C.__mro__)

    def test_dict(self):
        # Generic machinery must not interfere with instance __dict__.
        T = TypeVar('T')

        class B(Generic[T]):
            pass

        b = B()
        b.foo = 42
        self.assertEqual(b.__dict__, {'foo': 42})

        class C(B[int]):
            pass

        c = C()
        c.bar = 'abc'
        self.assertEqual(c.__dict__, {'bar': 'abc'})

    def test_false_subclasses(self):
        class MyMapping(MutableMapping[str, str]): pass
        self.assertNotIsInstance({}, MyMapping)
        self.assertNotIsSubclass(dict, MyMapping)

    def test_abc_bases(self):
        # Subclassing a typing ABC provides the abc.ABC mixin methods.
        class MM(MutableMapping[str, str]):
            def __getitem__(self, k):
                return None
            def __setitem__(self, k, v):
                pass
            def __delitem__(self, k):
                pass
            def __iter__(self):
                return iter(())
            def __len__(self):
                return 0
        # this should just work
        MM().update()
        self.assertIsInstance(MM(), collections_abc.MutableMapping)
        self.assertIsInstance(MM(), MutableMapping)
        self.assertNotIsInstance(MM(), List)
        self.assertNotIsInstance({}, MM)

    def test_multiple_bases(self):
        class MM1(MutableMapping[str, str], collections_abc.MutableMapping):
            pass
        with self.assertRaises(TypeError):
            # consistent MRO not possible
            class MM2(collections_abc.MutableMapping, MutableMapping[str, str]):
                pass

    def test_orig_bases(self):
        # __orig_bases__ records the parameterized bases as written.
        T = TypeVar('T')
        class C(typing.Dict[str, T]): ...
        self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))

    def test_naive_runtime_checks(self):
        # Demonstrates how user code can do its own runtime checks using
        # __args__, __orig_class__, and __orig_bases__.
        def naive_dict_check(obj, tp):
            # Check if a dictionary conforms to Dict type
            if len(tp.__parameters__) > 0:
                raise NotImplementedError
            if tp.__args__:
                KT, VT = tp.__args__
                return all(isinstance(k, KT) and isinstance(v, VT)
                   for k, v in obj.items())
        self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[str, int]))
        self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[str, int]))
        with self.assertRaises(NotImplementedError):
            naive_dict_check({1: 'x'}, typing.Dict[str, T])

        def naive_generic_check(obj, tp):
            # Check if an instance conforms to the generic class
            if not hasattr(obj, '__orig_class__'):
                raise NotImplementedError
            return obj.__orig_class__ == tp
        class Node(Generic[T]): ...
        self.assertTrue(naive_generic_check(Node[int](), Node[int]))
        self.assertFalse(naive_generic_check(Node[str](), Node[int]))
        self.assertFalse(naive_generic_check(Node[str](), List))
        with self.assertRaises(NotImplementedError):
            naive_generic_check([1,2,3], Node[int])

        def naive_list_base_check(obj, tp):
            # Check if list conforms to a List subclass
            return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
                       for x in obj)
        class C(List[int]): ...
        self.assertTrue(naive_list_base_check([1, 2, 3], C))
        self.assertFalse(naive_list_base_check(['a', 'b'], C))

    def test_multi_subscr_base(self):
        T = TypeVar('T')
        U = TypeVar('U')
        V = TypeVar('V')
        class C(List[T][U][V]): ...
        class D(C, List[T][U][V]): ...
        self.assertEqual(C.__parameters__, (V,))
        self.assertEqual(D.__parameters__, (V,))
        self.assertEqual(C[int].__parameters__, ())
        self.assertEqual(D[int].__parameters__, ())
        self.assertEqual(C[int].__args__, (int,))
        self.assertEqual(D[int].__args__, (int,))
        self.assertEqual(C.__bases__, (List,))
        self.assertEqual(D.__bases__, (C, List))
        self.assertEqual(C.__orig_bases__, (List[T][U][V],))
        self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))

    def test_extended_generic_rules_eq(self):
        # Substitution rules for Tuple, Union, and Callable aliases.
        T = TypeVar('T')
        U = TypeVar('U')
        self.assertEqual(Tuple[T, T][int], Tuple[int, int])
        self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
        with self.assertRaises(TypeError):
            Tuple[T, int][()]
        with self.assertRaises(TypeError):
            Tuple[T, U][T, ...]

        self.assertEqual(Union[T, int][int], int)
        self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
        class Base: ...
        class Derived(Base): ...
        self.assertEqual(Union[T, Base][Derived], Base)
        with self.assertRaises(TypeError):
            Union[T, int][1]

        self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
        self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
        with self.assertRaises(TypeError):
            Callable[[T], U][..., int]
        with self.assertRaises(TypeError):
            Callable[[T], U][[], int]

    def test_extended_generic_rules_repr(self):
        T = TypeVar('T')
        self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
                         'Union[Tuple, Callable]')
        self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
                         'Tuple')
        self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
                         'Callable[..., Union[int, NoneType]]')
        self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
                         'Callable[[], List[int]]')

    def test_generic_forward_ref(self):
        # Forward references nested in generic aliases are resolved by
        # get_type_hints; already-evaluated aliases are returned as-is.
        def foobar(x: List[List['CC']]): ...
        class CC: ...
        self.assertEqual(get_type_hints(foobar, globals(), locals()), {'x': List[List[CC]]})
        T = TypeVar('T')
        AT = Tuple[T, ...]
        def barfoo(x: AT): ...
        self.assertIs(get_type_hints(barfoo, globals(), locals())['x'], AT)
        CT = Callable[..., List[T]]
        def barfoo2(x: CT): ...
        self.assertIs(get_type_hints(barfoo2, globals(), locals())['x'], CT)

    def test_extended_generic_rules_subclassing(self):
        class T1(Tuple[T, KT]): ...
        class T2(Tuple[T, ...]): ...
        class C1(Callable[[T], T]): ...
        class C2(Callable[..., int]):
            def __call__(self):
                return None

        self.assertEqual(T1.__parameters__, (T, KT))
        self.assertEqual(T1[int, str].__args__, (int, str))
        self.assertEqual(T1[int, T].__origin__, T1)

        self.assertEqual(T2.__parameters__, (T,))
        with self.assertRaises(TypeError):
            T1[int]
        with self.assertRaises(TypeError):
            T2[int, str]

        self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
        self.assertEqual(C2.__parameters__, ())
        self.assertIsInstance(C2(), collections_abc.Callable)
        self.assertIsSubclass(C2, collections_abc.Callable)
        self.assertIsSubclass(C1, collections_abc.Callable)
        self.assertIsInstance(T1(), tuple)
        self.assertIsSubclass(T2, tuple)
        self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
        self.assertIsSubclass(Tuple[int, ...], typing.Iterable)

    def test_fail_with_bare_union(self):
        # Bare Union/Optional/ClassVar cannot appear as type arguments.
        with self.assertRaises(TypeError):
            List[Union]
        with self.assertRaises(TypeError):
            Tuple[Optional]
        with self.assertRaises(TypeError):
            ClassVar[ClassVar]
        with self.assertRaises(TypeError):
            List[ClassVar[int]]

    def test_fail_with_bare_generic(self):
        T = TypeVar('T')
        with self.assertRaises(TypeError):
            List[Generic]
        with self.assertRaises(TypeError):
            Tuple[Generic[T]]
        with self.assertRaises(TypeError):
            List[typing._Protocol]

    def test_type_erasure_special(self):
        # Instantiating a parameterized alias erases parameters on the
        # instance's class but records them in __orig_class__.
        T = TypeVar('T')
        # this is the only test that checks type caching
        self.clear_caches()
        class MyTup(Tuple[T, T]): ...
        self.assertIs(MyTup[int]().__class__, MyTup)
        self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
        class MyCall(Callable[..., T]):
            def __call__(self): return None
        self.assertIs(MyCall[T]().__class__, MyCall)
        self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
        class MyDict(typing.Dict[T, T]): ...
        self.assertIs(MyDict[int]().__class__, MyDict)
        self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
        class MyDef(typing.DefaultDict[str, T]): ...
        self.assertIs(MyDef[int]().__class__, MyDef)
        self.assertIs(MyDef[int]().__orig_class__, MyDef[int])

    def test_all_repr_eq_any(self):
        # Smoke test: every public typing name has a repr and is
        # self-equal; single-parameter generics accept [Any].
        objs = (getattr(typing, el) for el in typing.__all__)
        for obj in objs:
            self.assertNotEqual(repr(obj), '')
            self.assertEqual(obj, obj)
            if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
                self.assertEqual(obj[Any].__args__, (Any,))
            if isinstance(obj, type):
                for base in obj.__mro__:
                    self.assertNotEqual(repr(base), '')
                    self.assertEqual(base, base)

    def test_substitution_helper(self):
        # Exercises the private _subs_tree/_replace_arg helpers that
        # substitute type variables in method annotations.
        T = TypeVar('T')
        KT = TypeVar('KT')
        VT = TypeVar('VT')
        class Map(Generic[KT, VT]):
            def meth(self, k: KT, v: VT): ...
        StrMap = Map[str, T]
        obj = StrMap[int]()

        new_args = typing._subs_tree(obj.__orig_class__)
        new_annots = {k: typing._replace_arg(v, type(obj).__parameters__, new_args)
                      for k, v in obj.meth.__annotations__.items()}

        self.assertEqual(new_annots, {'k': str, 'v': int})

    def test_pickle(self):
        global C  # pickle wants to reference the class by name
        T = TypeVar('T')

        class B(Generic[T]):
            pass

        class C(B[int]):
            pass

        c = C()
        c.foo = 42
        c.bar = 'abc'
        # Instances and the special typing forms round-trip through every
        # pickle protocol.
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            z = pickle.dumps(c, proto)
            x = pickle.loads(z)
            self.assertEqual(x.foo, 42)
            self.assertEqual(x.bar, 'abc')
            self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
        simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
        for s in simples:
            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
                z = pickle.dumps(s, proto)
                x = pickle.loads(z)
                self.assertEqual(s, x)

    def test_copy_and_deepcopy(self):
        # copy()/deepcopy() of typing forms compare equal to the original.
        T = TypeVar('T')
        class Node(Generic[T]): ...
        things = [Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int],
                  Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T],
                  typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str],
                  typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'],
                  Union['T', int], List['T'], typing.Mapping['T', int]]
        for t in things + [Any]:
            self.assertEqual(t, copy(t))
            self.assertEqual(t, deepcopy(t))

    def test_parameterized_slots(self):
        # __slots__ is shared between a generic class and its
        # parameterized aliases.
        T = TypeVar('T')
        class C(Generic[T]):
            __slots__ = ('potato',)

        c = C()
        c_int = C[int]()
        self.assertEqual(C.__slots__, C[str].__slots__)

        c.potato = 0
        c_int.potato = 0
        with self.assertRaises(AttributeError):
            c.tomato = 0
        with self.assertRaises(AttributeError):
            c_int.tomato = 0

        def foo(x: C['C']): ...
        self.assertEqual(get_type_hints(foo, globals(), locals())['x'], C[C])
        self.assertEqual(get_type_hints(foo, globals(), locals())['x'].__slots__,
                         C.__slots__)
        self.assertEqual(copy(C[int]), deepcopy(C[int]))

    def test_parameterized_slots_dict(self):
        # __slots__ given as a dict (name -> docstring) works the same way.
        T = TypeVar('T')
        class D(Generic[T]):
            __slots__ = {'banana': 42}

        d = D()
        d_int = D[int]()
        self.assertEqual(D.__slots__, D[str].__slots__)

        d.banana = 'yes'
        d_int.banana = 'yes'
        with self.assertRaises(AttributeError):
            d.foobar = 'no'
        with self.assertRaises(AttributeError):
            d_int.foobar = 'no'

    def test_errors(self):
        # Only type variables may parameterize Generic[...].
        with self.assertRaises(TypeError):
            B = SimpleMapping[XK, Any]

            class C(Generic[B]):
                pass

    def test_repr_2(self):
        PY32 = sys.version_info[:2] < (3, 3)

        class C(Generic[T]):
            pass

        self.assertEqual(C.__module__, __name__)
        if not PY32:
            self.assertEqual(C.__qualname__,
                             'GenericTests.test_repr_2.<locals>.C')
        self.assertEqual(repr(C).split('.')[-1], 'C')
        X = C[int]
        self.assertEqual(X.__module__, __name__)
        if not PY32:
            self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
        self.assertEqual(repr(X).split('.')[-1], 'C[int]')

        class Y(C[int]):
            pass

        self.assertEqual(Y.__module__, __name__)
        if not PY32:
            self.assertEqual(Y.__qualname__,
                             'GenericTests.test_repr_2.<locals>.Y')
        self.assertEqual(repr(Y).split('.')[-1], 'Y')

    def test_eq_1(self):
        self.assertEqual(Generic, Generic)
        self.assertEqual(Generic[T], Generic[T])
        self.assertNotEqual(Generic[KT], Generic[VT])

    def test_eq_2(self):

        class A(Generic[T]):
            pass

        class B(Generic[T]):
            pass

        self.assertEqual(A, A)
        self.assertNotEqual(A, B)
        self.assertEqual(A[T], A[T])
        self.assertNotEqual(A[T], B[T])

    def test_multiple_inheritance(self):
        # Explicit Generic[...] base fixes the order of __parameters__.

        class A(Generic[T, VT]):
            pass

        class B(Generic[KT, T]):
            pass

        class C(A[T, VT], Generic[VT, T, KT], B[KT, T]):
            pass

        self.assertEqual(C.__parameters__, (VT, T, KT))

    def test_nested(self):

        G = Generic

        class Visitor(G[T]):

            a = None

            def set(self, a: T):
                self.a = a

            def get(self):
                return self.a

            def visit(self) -> T:
                return self.a

        V = Visitor[typing.List[int]]

        class IntListVisitor(V):

            def append(self, x: int):
                self.a.append(x)

        a = IntListVisitor()
        a.set([])
        a.append(1)
        a.append(42)
        self.assertEqual(a.get(), [1, 42])

    def test_type_erasure(self):
        # Node(x), Node[T](x) and Node[Any](x) all construct plain Node
        # instances at runtime.
        T = TypeVar('T')

        class Node(Generic[T]):
            def __init__(self, label: T,
                         left: 'Node[T]' = None,
                         right: 'Node[T]' = None):
                self.label = label  # type: T
                self.left = left  # type: Optional[Node[T]]
                self.right = right  # type: Optional[Node[T]]

        def foo(x: T):
            a = Node(x)
            b = Node[T](x)
            c = Node[Any](x)
            self.assertIs(type(a), Node)
            self.assertIs(type(b), Node)
            self.assertIs(type(c), Node)
            self.assertEqual(a.label, x)
            self.assertEqual(b.label, x)
            self.assertEqual(c.label, x)

        foo(42)

    def test_implicit_any(self):
        # Deriving from a generic without subscripting erases the
        # parameters entirely; the subclass is no longer subscriptable.
        T = TypeVar('T')

        class C(Generic[T]):
            pass

        class D(C):
            pass

        self.assertEqual(D.__parameters__, ())

        with self.assertRaises(Exception):
            D[int]
        with self.assertRaises(Exception):
            D[Any]
        with self.assertRaises(Exception):
            D[T]
+
class ClassVarTests(BaseTestCase):
    """Tests for typing.ClassVar: subscription rules, repr, and runtime
    restrictions (no subclassing, no instantiation, no isinstance)."""

    def test_basics(self):
        # ClassVar takes exactly one type argument and cannot be re-subscripted.
        with self.assertRaises(TypeError):
            ClassVar[1]
        with self.assertRaises(TypeError):
            ClassVar[int, str]
        with self.assertRaises(TypeError):
            ClassVar[int][str]

    def test_repr(self):
        self.assertEqual(repr(ClassVar), 'typing.ClassVar')
        cv = ClassVar[int]
        self.assertEqual(repr(cv), 'typing.ClassVar[int]')
        cv = ClassVar[Employee]
        self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)

    def test_cannot_subclass(self):
        with self.assertRaises(TypeError):
            class C(type(ClassVar)):
                pass
        with self.assertRaises(TypeError):
            class C(type(ClassVar[int])):
                pass

    def test_cannot_init(self):
        with self.assertRaises(TypeError):
            ClassVar()
        with self.assertRaises(TypeError):
            type(ClassVar)()
        with self.assertRaises(TypeError):
            type(ClassVar[Optional[int]])()

    def test_no_isinstance(self):
        with self.assertRaises(TypeError):
            isinstance(1, ClassVar[int])
        with self.assertRaises(TypeError):
            issubclass(int, ClassVar)
+
+
class CastTests(BaseTestCase):
    """Tests for typing.cast: it returns its second argument unchanged
    and performs no runtime checking of the first argument."""

    def test_basics(self):
        # cast() is an identity function at runtime, whatever the target.
        self.assertEqual(cast(int, 42), 42)
        self.assertEqual(cast(float, 42), 42)
        self.assertIs(type(cast(float, 42)), int)
        self.assertEqual(cast(Any, 42), 42)
        self.assertEqual(cast(list, 42), 42)
        self.assertEqual(cast(Union[str, float], 42), 42)
        self.assertEqual(cast(AnyStr, 42), 42)
        self.assertEqual(cast(None, 42), 42)

    def test_errors(self):
        # Bogus calls are not expected to fail.
        cast(42, 42)
        cast('hello', 42)
+
+
+class ForwardRefTests(BaseTestCase):
+
    def test_basics(self):
        """String annotations referring to the enclosing generic class
        are resolved to real types by get_type_hints, and parameters with
        a None default become Optional."""

        class Node(Generic[T]):

            def __init__(self, label: T):
                self.label = label
                self.left = self.right = None

            def add_both(self,
                         left: 'Optional[Node[T]]',
                         right: 'Node[T]' = None,
                         stuff: int = None,
                         blah=None):
                self.left = left
                self.right = right

            def add_left(self, node: Optional['Node[T]']):
                self.add_both(node, None)

            def add_right(self, node: 'Node[T]' = None):
                self.add_both(None, node)

        t = Node[int]
        both_hints = get_type_hints(t.add_both, globals(), locals())
        self.assertEqual(both_hints['left'], Optional[Node[T]])
        self.assertEqual(both_hints['right'], Optional[Node[T]])
        self.assertEqual(both_hints['left'], both_hints['right'])
        self.assertEqual(both_hints['stuff'], Optional[int])
        # Unannotated parameters do not appear in the hints at all.
        self.assertNotIn('blah', both_hints)

        left_hints = get_type_hints(t.add_left, globals(), locals())
        self.assertEqual(left_hints['node'], Optional[Node[T]])

        right_hints = get_type_hints(t.add_right, globals(), locals())
        self.assertEqual(right_hints['node'], Optional[Node[T]])
+
+    def test_forwardref_instance_type_error(self):
+        fr = typing._ForwardRef('int')
+        with self.assertRaises(TypeError):
+            isinstance(42, fr)
+
+    def test_union_forward(self):
+
+        def foo(a: Union['T']):
+            pass
+
+        self.assertEqual(get_type_hints(foo, globals(), locals()),
+                         {'a': Union[T]})
+
+    def test_tuple_forward(self):
+
+        def foo(a: Tuple['T']):
+            pass
+
+        self.assertEqual(get_type_hints(foo, globals(), locals()),
+                         {'a': Tuple[T]})
+
+    def test_callable_forward(self):
+
+        def foo(a: Callable[['T'], 'T']):
+            pass
+
+        self.assertEqual(get_type_hints(foo, globals(), locals()),
+                         {'a': Callable[[T], T]})
+
+    def test_callable_with_ellipsis_forward(self):
+
+        def foo(a: 'Callable[..., T]'):
+            pass
+
+        self.assertEqual(get_type_hints(foo, globals(), locals()),
+                         {'a': Callable[..., T]})
+
+    def test_syntax_error(self):
+
+        with self.assertRaises(SyntaxError):
+            Generic['/T']
+
+    def test_delayed_syntax_error(self):
+
+        def foo(a: 'Node[T'):
+            pass
+
+        with self.assertRaises(SyntaxError):
+            get_type_hints(foo)
+
+    def test_type_error(self):
+
+        def foo(a: Tuple['42']):
+            pass
+
+        with self.assertRaises(TypeError):
+            get_type_hints(foo)
+
+    def test_name_error(self):
+
+        def foo(a: 'Noode[T]'):
+            pass
+
+        with self.assertRaises(NameError):
+            get_type_hints(foo, locals())
+
+    def test_no_type_check(self):
+
+        @no_type_check
+        def foo(a: 'whatevers') -> {}:
+            pass
+
+        th = get_type_hints(foo)
+        self.assertEqual(th, {})
+
+    def test_no_type_check_class(self):
+
+        @no_type_check
+        class C:
+            def foo(a: 'whatevers') -> {}:
+                pass
+
+        cth = get_type_hints(C.foo)
+        self.assertEqual(cth, {})
+        ith = get_type_hints(C().foo)
+        self.assertEqual(ith, {})
+
+    def test_meta_no_type_check(self):
+
+        @no_type_check_decorator
+        def magic_decorator(deco):
+            return deco
+
+        self.assertEqual(magic_decorator.__name__, 'magic_decorator')
+
+        @magic_decorator
+        def foo(a: 'whatevers') -> {}:
+            pass
+
+        @magic_decorator
+        class C:
+            def foo(a: 'whatevers') -> {}:
+                pass
+
+        self.assertEqual(foo.__name__, 'foo')
+        th = get_type_hints(foo)
+        self.assertEqual(th, {})
+        cth = get_type_hints(C.foo)
+        self.assertEqual(cth, {})
+        ith = get_type_hints(C().foo)
+        self.assertEqual(ith, {})
+
+    def test_default_globals(self):
+        code = ("class C:\n"
+                "    def foo(self, a: 'C') -> 'D': pass\n"
+                "class D:\n"
+                "    def bar(self, b: 'D') -> C: pass\n"
+                )
+        ns = {}
+        exec(code, ns)
+        hints = get_type_hints(ns['C'].foo)
+        self.assertEqual(hints, {'a': ns['C'], 'return': ns['D']})
+
+
+class OverloadTests(BaseTestCase):
+    """@overload declarations are stubs: calling one raises at runtime,
+    and a real implementation must follow the overloaded signatures."""
+
+    def test_overload_exists(self):
+        from typing import overload
+
+    def test_overload_fails(self):
+        from typing import overload
+
+        # Calling an @overload stub directly must raise RuntimeError.
+        with self.assertRaises(RuntimeError):
+
+            @overload
+            def blah():
+                pass
+
+            blah()
+
+    def test_overload_succeeds(self):
+        from typing import overload
+
+        @overload
+        def blah():
+            pass
+
+        # The non-decorated definition shadows the stub and is callable.
+        def blah():
+            pass
+
+        blah()
+
+
+ASYNCIO = sys.version_info[:2] >= (3, 5)  # async/await tests need 3.5+
+
+# Defined via exec() below so this module still *parses* on older Pythons.
+ASYNCIO_TESTS = """
+import asyncio
+
+T_a = TypeVar('T_a')
+
+class AwaitableWrapper(typing.Awaitable[T_a]):
+
+    def __init__(self, value):
+        self.value = value
+
+    def __await__(self) -> typing.Iterator[T_a]:
+        yield
+        return self.value
+
+class AsyncIteratorWrapper(typing.AsyncIterator[T_a]):
+
+    def __init__(self, value: typing.Iterable[T_a]):
+        self.value = value
+
+    def __aiter__(self) -> typing.AsyncIterator[T_a]:
+        return self
+
+    @asyncio.coroutine
+    def __anext__(self) -> T_a:
+        data = yield from self.value
+        if data:
+            return data
+        else:
+            raise StopAsyncIteration
+"""
+
+if ASYNCIO:
+    try:
+        exec(ASYNCIO_TESTS)
+    except ImportError:
+        ASYNCIO = False  # asyncio unavailable: skip those tests
+
+PY36 = sys.version_info[:2] >= (3, 6)  # variable annotations need 3.6+
+
+# Same trick: 3.6-only syntax (class-level variable annotations,
+# NamedTuple-with-annotations) kept out of the static parse.
+PY36_TESTS = """
+from test import ann_module, ann_module2, ann_module3
+
+class A:
+    y: float
+class B(A):
+    x: ClassVar[Optional['B']] = None
+    y: int
+class CSub(B):
+    z: ClassVar['CSub'] = B()
+class G(Generic[T]):
+    lst: ClassVar[List[T]] = []
+
+class CoolEmployee(NamedTuple):
+    name: str
+    cool: int
+"""
+
+if PY36:
+    exec(PY36_TESTS)
+
+gth = get_type_hints  # short alias used throughout the tests below
+
+class GetTypeHintTests(BaseTestCase):
+    """get_type_hints() on modules, classes, functions and builtins."""
+
+    def test_get_type_hints_from_various_objects(self):
+        # For invalid objects should fail with TypeError (not AttributeError etc).
+        with self.assertRaises(TypeError):
+            gth(123)
+        with self.assertRaises(TypeError):
+            gth('abc')
+        with self.assertRaises(TypeError):
+            gth(None)
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_modules(self):
+        self.assertEqual(gth(ann_module), {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str})
+        self.assertEqual(gth(ann_module2), {})
+        self.assertEqual(gth(ann_module3), {})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_classes(self):
+        # Expected hint sets come from the test.ann_module* fixture modules.
+        self.assertEqual(gth(ann_module.C, ann_module.__dict__),
+                         {'y': Optional[ann_module.C]})
+        self.assertIsInstance(gth(ann_module.j_class), dict)
+        self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type})
+        self.assertEqual(gth(ann_module.D),
+                         {'j': str, 'k': str, 'y': Optional[ann_module.C]})
+        self.assertEqual(gth(ann_module.Y), {'z': int})
+        self.assertEqual(gth(ann_module.h_class),
+                         {'y': Optional[ann_module.C]})
+        self.assertEqual(gth(ann_module.S), {'x': str, 'y': str})
+        self.assertEqual(gth(ann_module.foo), {'x': int})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_respect_no_type_check(self):
+        @no_type_check
+        class NoTpCheck:
+            class Inn:
+                def __init__(self, x: 'not a type'): ...
+        # The marker propagates to nested classes and their methods.
+        self.assertTrue(NoTpCheck.__no_type_check__)
+        self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+        self.assertEqual(gth(ann_module2.NTC.meth), {})
+        class ABase(Generic[T]):
+            def meth(x: int): ...
+        @no_type_check
+        class Der(ABase): ...
+        # Marking a subclass must not erase hints on the base class.
+        self.assertEqual(gth(ABase.meth), {'x': int})
+
+    # NOTE(review): "builins" is a typo for "builtins" in this test name.
+    # Harmless (unittest discovery still finds it), but should be fixed
+    # upstream; not renamed here to keep this vendored file in sync.
+    def test_get_type_hints_for_builins(self):
+        # Should not fail for built-in classes and functions.
+        self.assertEqual(gth(int), {})
+        self.assertEqual(gth(type), {})
+        self.assertEqual(gth(dir), {})
+        self.assertEqual(gth(len), {})
+
+    def test_previous_behavior(self):
+        # String annotations injected after definition are still evaluated.
+        def testf(x, y): ...
+        testf.__annotations__['x'] = 'int'
+        self.assertEqual(gth(testf), {'x': int})
+
+    def test_get_type_hints_for_object_with_annotations(self):
+        class A: ...
+        class B: ...
+        b = B()
+        b.__annotations__ = {'x': 'A'}
+        self.assertEqual(gth(b, locals()), {'x': A})
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_get_type_hints_ClassVar(self):
+        # ClassVar wrappers are preserved in the returned hints, and hints
+        # are merged across the MRO (B's entries show up for CSub).
+        self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__),
+                         {'var': typing.ClassVar[ann_module2.CV]})
+        self.assertEqual(gth(B, globals()),
+                         {'y': int, 'x': ClassVar[Optional[B]]})
+        self.assertEqual(gth(CSub, globals()),
+                         {'z': ClassVar[CSub], 'y': int, 'x': ClassVar[Optional[B]]})
+        self.assertEqual(gth(G), {'lst': ClassVar[List[T]]})
+
+
+class CollectionsAbcTests(BaseTestCase):
+    """typing's generic aliases for collections.abc: isinstance/issubclass
+    behavior, the no-instantiation rule for bare generics, and subclassing."""
+
+    def test_hashable(self):
+        self.assertIsInstance(42, typing.Hashable)
+        self.assertNotIsInstance([], typing.Hashable)
+
+    def test_iterable(self):
+        self.assertIsInstance([], typing.Iterable)
+        # Due to ABC caching, the second time takes a separate code
+        # path and could fail.  So call this a few times.
+        self.assertIsInstance([], typing.Iterable)
+        self.assertIsInstance([], typing.Iterable)
+        self.assertNotIsInstance(42, typing.Iterable)
+        # Just in case, also test issubclass() a few times.
+        self.assertIsSubclass(list, typing.Iterable)
+        self.assertIsSubclass(list, typing.Iterable)
+
+    def test_iterator(self):
+        it = iter([])
+        self.assertIsInstance(it, typing.Iterator)
+        self.assertNotIsInstance(42, typing.Iterator)
+
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
+    def test_awaitable(self):
+        # async syntax is exec()'d so the module parses on Python < 3.5.
+        ns = {}
+        exec(
+            "async def foo() -> typing.Awaitable[int]:\n"
+            "    return await AwaitableWrapper(42)\n",
+            globals(), ns)
+        foo = ns['foo']
+        g = foo()
+        self.assertIsInstance(g, typing.Awaitable)
+        self.assertNotIsInstance(foo, typing.Awaitable)
+        g.send(None)  # Run foo() till completion, to avoid warning.
+
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
+    def test_coroutine(self):
+        ns = {}
+        exec(
+            "async def foo():\n"
+            "    return\n",
+            globals(), ns)
+        foo = ns['foo']
+        g = foo()
+        self.assertIsInstance(g, typing.Coroutine)
+        with self.assertRaises(TypeError):
+            isinstance(g, typing.Coroutine[int])
+        self.assertNotIsInstance(foo, typing.Coroutine)
+        try:
+            g.send(None)
+        except StopIteration:
+            pass
+
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
+    def test_async_iterable(self):
+        base_it = range(10)  # type: Iterator[int]
+        it = AsyncIteratorWrapper(base_it)
+        self.assertIsInstance(it, typing.AsyncIterable)
+        self.assertIsInstance(it, typing.AsyncIterable)
+        self.assertNotIsInstance(42, typing.AsyncIterable)
+
+    @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required')
+    def test_async_iterator(self):
+        base_it = range(10)  # type: Iterator[int]
+        it = AsyncIteratorWrapper(base_it)
+        self.assertIsInstance(it, typing.AsyncIterator)
+        self.assertNotIsInstance(42, typing.AsyncIterator)
+
+    def test_sized(self):
+        self.assertIsInstance([], typing.Sized)
+        self.assertNotIsInstance(42, typing.Sized)
+
+    def test_container(self):
+        self.assertIsInstance([], typing.Container)
+        self.assertNotIsInstance(42, typing.Container)
+
+    def test_collection(self):
+        # typing.Collection only exists on newer stdlibs; guard for it.
+        if hasattr(typing, 'Collection'):
+            self.assertIsInstance(tuple(), typing.Collection)
+            self.assertIsInstance(frozenset(), typing.Collection)
+            self.assertIsSubclass(dict, typing.Collection)
+            self.assertNotIsInstance(42, typing.Collection)
+
+    def test_abstractset(self):
+        self.assertIsInstance(set(), typing.AbstractSet)
+        self.assertNotIsInstance(42, typing.AbstractSet)
+
+    def test_mutableset(self):
+        self.assertIsInstance(set(), typing.MutableSet)
+        self.assertNotIsInstance(frozenset(), typing.MutableSet)
+
+    def test_mapping(self):
+        self.assertIsInstance({}, typing.Mapping)
+        self.assertNotIsInstance(42, typing.Mapping)
+
+    def test_mutablemapping(self):
+        self.assertIsInstance({}, typing.MutableMapping)
+        self.assertNotIsInstance(42, typing.MutableMapping)
+
+    def test_sequence(self):
+        self.assertIsInstance([], typing.Sequence)
+        self.assertNotIsInstance(42, typing.Sequence)
+
+    def test_mutablesequence(self):
+        self.assertIsInstance([], typing.MutableSequence)
+        self.assertNotIsInstance((), typing.MutableSequence)
+
+    def test_bytestring(self):
+        self.assertIsInstance(b'', typing.ByteString)
+        self.assertIsInstance(bytearray(b''), typing.ByteString)
+
+    def test_list(self):
+        self.assertIsSubclass(list, typing.List)
+
+    def test_set(self):
+        self.assertIsSubclass(set, typing.Set)
+        self.assertNotIsSubclass(frozenset, typing.Set)
+
+    def test_frozenset(self):
+        self.assertIsSubclass(frozenset, typing.FrozenSet)
+        self.assertNotIsSubclass(set, typing.FrozenSet)
+
+    def test_dict(self):
+        self.assertIsSubclass(dict, typing.Dict)
+
+    # Bare/parameterized typing aliases cannot be instantiated directly;
+    # only user subclasses of them can (tested pairwise below).
+    def test_no_list_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.List()
+        with self.assertRaises(TypeError):
+            typing.List[T]()
+        with self.assertRaises(TypeError):
+            typing.List[int]()
+
+    def test_list_subclass(self):
+
+        class MyList(typing.List[int]):
+            pass
+
+        a = MyList()
+        self.assertIsInstance(a, MyList)
+        self.assertIsInstance(a, typing.Sequence)
+
+        self.assertIsSubclass(MyList, list)
+        self.assertNotIsSubclass(list, MyList)
+
+    def test_no_dict_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Dict()
+        with self.assertRaises(TypeError):
+            typing.Dict[KT, VT]()
+        with self.assertRaises(TypeError):
+            typing.Dict[str, int]()
+
+    def test_dict_subclass(self):
+
+        class MyDict(typing.Dict[str, int]):
+            pass
+
+        d = MyDict()
+        self.assertIsInstance(d, MyDict)
+        self.assertIsInstance(d, typing.MutableMapping)
+
+        self.assertIsSubclass(MyDict, dict)
+        self.assertNotIsSubclass(dict, MyDict)
+
+    def test_no_defaultdict_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.DefaultDict()
+        with self.assertRaises(TypeError):
+            typing.DefaultDict[KT, VT]()
+        with self.assertRaises(TypeError):
+            typing.DefaultDict[str, int]()
+
+    def test_defaultdict_subclass(self):
+
+        class MyDefDict(typing.DefaultDict[str, int]):
+            pass
+
+        dd = MyDefDict()
+        self.assertIsInstance(dd, MyDefDict)
+
+        self.assertIsSubclass(MyDefDict, collections.defaultdict)
+        self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
+
+    def test_no_set_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Set()
+        with self.assertRaises(TypeError):
+            typing.Set[T]()
+        with self.assertRaises(TypeError):
+            typing.Set[int]()
+
+    def test_set_subclass_instantiation(self):
+
+        class MySet(typing.Set[int]):
+            pass
+
+        d = MySet()
+        self.assertIsInstance(d, MySet)
+
+    def test_no_frozenset_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.FrozenSet()
+        with self.assertRaises(TypeError):
+            typing.FrozenSet[T]()
+        with self.assertRaises(TypeError):
+            typing.FrozenSet[int]()
+
+    def test_frozenset_subclass_instantiation(self):
+
+        class MyFrozenSet(typing.FrozenSet[int]):
+            pass
+
+        d = MyFrozenSet()
+        self.assertIsInstance(d, MyFrozenSet)
+
+    def test_no_tuple_instantiation(self):
+        with self.assertRaises(TypeError):
+            Tuple()
+        with self.assertRaises(TypeError):
+            Tuple[T]()
+        with self.assertRaises(TypeError):
+            Tuple[int]()
+
+    def test_generator(self):
+        def foo():
+            yield 42
+        g = foo()
+        self.assertIsSubclass(type(g), typing.Generator)
+
+    def test_no_generator_instantiation(self):
+        with self.assertRaises(TypeError):
+            typing.Generator()
+        with self.assertRaises(TypeError):
+            typing.Generator[T, T, T]()
+        with self.assertRaises(TypeError):
+            typing.Generator[int, int, int]()
+
+    def test_subclassing(self):
+
+        class MMA(typing.MutableMapping):
+            pass
+
+        with self.assertRaises(TypeError):  # It's abstract
+            MMA()
+
+        class MMC(MMA):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+
+        self.assertEqual(len(MMC()), 0)
+        assert callable(MMC.update)
+        self.assertIsInstance(MMC(), typing.Mapping)
+
+        class MMB(typing.MutableMapping[KT, VT]):
+            def __getitem__(self, k):
+                return None
+            def __setitem__(self, k, v):
+                pass
+            def __delitem__(self, k):
+                pass
+            def __iter__(self):
+                return iter(())
+            def __len__(self):
+                return 0
+
+        # A parameterized subclass is instantiable both bare and subscripted.
+        self.assertEqual(len(MMB()), 0)
+        self.assertEqual(len(MMB[str, str]()), 0)
+        self.assertEqual(len(MMB[KT, VT]()), 0)
+
+        self.assertNotIsSubclass(dict, MMA)
+        self.assertNotIsSubclass(dict, MMB)
+
+        self.assertIsSubclass(MMA, typing.Mapping)
+        self.assertIsSubclass(MMB, typing.Mapping)
+        self.assertIsSubclass(MMC, typing.Mapping)
+
+        # The typing ABCs interoperate with the collections.abc originals.
+        self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
+        self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
+
+        self.assertIsSubclass(MMA, collections.Mapping)
+        self.assertIsSubclass(MMB, collections.Mapping)
+        self.assertIsSubclass(MMC, collections.Mapping)
+
+        self.assertIsSubclass(MMB[str, str], typing.Mapping)
+        self.assertIsSubclass(MMC, MMA)
+
+        class I(typing.Iterable): ...
+        self.assertNotIsSubclass(list, I)
+
+        class G(typing.Generator[int, int, int]): ...
+        def g(): yield 0
+        self.assertIsSubclass(G, typing.Generator)
+        self.assertIsSubclass(G, typing.Iterable)
+        if hasattr(collections, 'Generator'):
+            self.assertIsSubclass(G, collections.Generator)
+        self.assertIsSubclass(G, collections.Iterable)
+        self.assertNotIsSubclass(type(g), G)
+
+    def test_subclassing_subclasshook(self):
+
+        # A user-defined __subclasshook__ must be honored by the typing ABCs.
+        class Base(typing.Iterable):
+            @classmethod
+            def __subclasshook__(cls, other):
+                if other.__name__ == 'Foo':
+                    return True
+                else:
+                    return False
+
+        class C(Base): ...
+        class Foo: ...
+        class Bar: ...
+        self.assertIsSubclass(Foo, Base)
+        self.assertIsSubclass(Foo, C)
+        self.assertNotIsSubclass(Bar, C)
+
+    def test_subclassing_register(self):
+
+        # ABC .register() works on typing-derived classes, and registration
+        # on a subclass does not leak to its siblings/parents incorrectly.
+        class A(typing.Container): ...
+        class B(A): ...
+
+        class C: ...
+        A.register(C)
+        self.assertIsSubclass(C, A)
+        self.assertNotIsSubclass(C, B)
+
+        class D: ...
+        B.register(D)
+        self.assertIsSubclass(D, A)
+        self.assertIsSubclass(D, B)
+
+        class M(): ...
+        collections.MutableMapping.register(M)
+        self.assertIsSubclass(M, typing.Mapping)
+
+    def test_collections_as_base(self):
+
+        # Deriving from the collections.abc originals still registers the
+        # class as a subclass of the corresponding typing alias.
+        class M(collections.Mapping): ...
+        self.assertIsSubclass(M, typing.Mapping)
+        self.assertIsSubclass(M, typing.Iterable)
+
+        class S(collections.MutableSequence): ...
+        self.assertIsSubclass(S, typing.MutableSequence)
+        self.assertIsSubclass(S, typing.Iterable)
+
+        class I(collections.Iterable): ...
+        self.assertIsSubclass(I, typing.Iterable)
+
+        class A(collections.Mapping, metaclass=abc.ABCMeta): ...
+        class B: ...
+        A.register(B)
+        self.assertIsSubclass(B, typing.Mapping)
+
+
+class OtherABCTests(BaseTestCase):
+    """ABCs that only exist on some stdlib versions (ContextManager)."""
+
+    @skipUnless(hasattr(typing, 'ContextManager'),
+                'requires typing.ContextManager')
+    def test_contextmanager(self):
+        @contextlib.contextmanager
+        def manager():
+            yield 42
+
+        cm = manager()
+        self.assertIsInstance(cm, typing.ContextManager)
+        self.assertNotIsInstance(42, typing.ContextManager)
+
+
+class TypeTests(BaseTestCase):
+    """Type[C]: annotating parameters that accept a class object itself."""
+
+    def test_type_basic(self):
+
+        class User: pass
+        class BasicUser(User): pass
+        class ProUser(User): pass
+
+        def new_user(user_class: Type[User]) -> User:
+            return user_class()
+
+        joe = new_user(BasicUser)
+
+    def test_type_typevar(self):
+
+        class User: pass
+        class BasicUser(User): pass
+        class ProUser(User): pass
+
+        # Bound TypeVar inside Type[...]: returns an instance of the
+        # argument class, not just the bound.
+        U = TypeVar('U', bound=User)
+
+        def new_user(user_class: Type[U]) -> U:
+            return user_class()
+
+        joe = new_user(BasicUser)
+
+    def test_type_optional(self):
+        A = Optional[Type[BaseException]]
+
+        def foo(a: A) -> Optional[BaseException]:
+            if a is None:
+                return None
+            else:
+                return a()
+
+        assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+        assert foo(None) is None
+
+
+class NewTypeTests(BaseTestCase):
+    """NewType() returns an identity callable, not a real class."""
+
+    def test_basic(self):
+        UserId = NewType('UserId', int)
+        UserName = NewType('UserName', str)
+        # At runtime the "new type" is just the underlying value.
+        self.assertIsInstance(UserId(5), int)
+        self.assertIsInstance(UserName('Joe'), str)
+        self.assertEqual(UserId(5) + 1, 6)
+
+    def test_errors(self):
+        UserId = NewType('UserId', int)
+        UserName = NewType('UserName', str)
+        # Not a class: cannot be used with issubclass() or subclassed.
+        with self.assertRaises(TypeError):
+            issubclass(UserId, int)
+        with self.assertRaises(TypeError):
+            class D(UserName):
+                pass
+
+
+class NamedTupleTests(BaseTestCase):
+    """typing.NamedTuple: functional form, 3.6 class/keyword forms, pickle."""
+
+    def test_basics(self):
+        Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+        self.assertIsSubclass(Emp, tuple)
+        joe = Emp('Joe', 42)
+        jim = Emp(name='Jim', id=1)
+        self.assertIsInstance(joe, Emp)
+        self.assertIsInstance(joe, tuple)
+        self.assertEqual(joe.name, 'Joe')
+        self.assertEqual(joe.id, 42)
+        self.assertEqual(jim.name, 'Jim')
+        self.assertEqual(jim.id, 1)
+        self.assertEqual(Emp.__name__, 'Emp')
+        self.assertEqual(Emp._fields, ('name', 'id'))
+        # _field_types is the typing extension over collections.namedtuple.
+        self.assertEqual(Emp._field_types, dict(name=str, id=int))
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_annotation_usage(self):
+        # CoolEmployee comes from the exec()'d PY36_TESTS block above.
+        tim = CoolEmployee('Tim', 9000)
+        self.assertIsInstance(tim, CoolEmployee)
+        self.assertIsInstance(tim, tuple)
+        self.assertEqual(tim.name, 'Tim')
+        self.assertEqual(tim.cool, 9000)
+        self.assertEqual(CoolEmployee.__name__, 'CoolEmployee')
+        self.assertEqual(CoolEmployee._fields, ('name', 'cool'))
+        self.assertEqual(CoolEmployee._field_types, dict(name=str, cool=int))
+
+    @skipUnless(PY36, 'Python 3.6 required')
+    def test_namedtuple_keyword_usage(self):
+        LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int)
+        nick = LocalEmployee('Nick', 25)
+        self.assertIsInstance(nick, tuple)
+        self.assertEqual(nick.name, 'Nick')
+        self.assertEqual(LocalEmployee.__name__, 'LocalEmployee')
+        self.assertEqual(LocalEmployee._fields, ('name', 'age'))
+        self.assertEqual(LocalEmployee._field_types, dict(name=str, age=int))
+        # Mixing the list form with keywords (or passing non-types) is an error.
+        with self.assertRaises(TypeError):
+            NamedTuple('Name', [('x', int)], y=str)
+        with self.assertRaises(TypeError):
+            NamedTuple('Name', x=1, y='a')
+
+    def test_pickle(self):
+        global Emp  # pickle wants to reference the class by name
+        Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+        jane = Emp('jane', 37)
+        # Round-trip under every supported pickle protocol.
+        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+            z = pickle.dumps(jane, proto)
+            jane2 = pickle.loads(z)
+            self.assertEqual(jane2, jane)
+
+
+class IOTests(BaseTestCase):
+    """IO/TextIO/BinaryIO: generic IO is parameterized by AnyStr, the
+    text/binary specializations are fully concrete."""
+
+    def test_io(self):
+
+        def stuff(a: IO) -> AnyStr:
+            return a.readline()
+
+        a = stuff.__annotations__['a']
+        self.assertEqual(a.__parameters__, (AnyStr,))
+
+    def test_textio(self):
+
+        def stuff(a: TextIO) -> str:
+            return a.readline()
+
+        a = stuff.__annotations__['a']
+        self.assertEqual(a.__parameters__, ())  # no free type variables
+
+    def test_binaryio(self):
+
+        def stuff(a: BinaryIO) -> bytes:
+            return a.readline()
+
+        a = stuff.__annotations__['a']
+        self.assertEqual(a.__parameters__, ())
+
+    def test_io_submodule(self):
+        # typing.io is a pseudo-submodule re-exporting the IO classes.
+        from typing.io import IO, TextIO, BinaryIO, __all__, __name__
+        self.assertIs(IO, typing.IO)
+        self.assertIs(TextIO, typing.TextIO)
+        self.assertIs(BinaryIO, typing.BinaryIO)
+        self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
+        self.assertEqual(__name__, 'typing.io')
+
+
+class RETests(BaseTestCase):
+    """typing.Pattern / typing.Match aliases for the re module types."""
+    # Much of this is really testing _TypeAlias.
+
+    def test_basics(self):
+        pat = re.compile('[a-z]+', re.I)
+        self.assertIsSubclass(pat.__class__, Pattern)
+        self.assertIsSubclass(type(pat), Pattern)
+        self.assertIsInstance(pat, Pattern)
+
+        mat = pat.search('12345abcde.....')
+        self.assertIsSubclass(mat.__class__, Match)
+        self.assertIsSubclass(type(mat), Match)
+        self.assertIsInstance(mat, Match)
+
+        # these should just work
+        p = Pattern[Union[str, bytes]]
+        m = Match[Union[bytes, str]]
+
+    def test_errors(self):
+        with self.assertRaises(TypeError):
+            # Doesn't fit AnyStr.
+            Pattern[int]
+        with self.assertRaises(TypeError):
+            # Can't change type vars?
+            Match[T]
+        m = Match[Union[str, bytes]]
+        with self.assertRaises(TypeError):
+            # Too complicated?
+            m[str]
+        with self.assertRaises(TypeError):
+            # We don't support isinstance().
+            isinstance(42, Pattern[str])
+
+    def test_repr(self):
+        self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
+        self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
+        self.assertEqual(repr(Pattern[bytes]), 'Pattern[bytes]')
+        self.assertEqual(repr(Match), 'Match[~AnyStr]')
+        self.assertEqual(repr(Match[str]), 'Match[str]')
+        self.assertEqual(repr(Match[bytes]), 'Match[bytes]')
+
+    def test_re_submodule(self):
+        # typing.re is a pseudo-submodule like typing.io above.
+        from typing.re import Match, Pattern, __all__, __name__
+        self.assertIs(Match, typing.Match)
+        self.assertIs(Pattern, typing.Pattern)
+        self.assertEqual(set(__all__), set(['Match', 'Pattern']))
+        self.assertEqual(__name__, 'typing.re')
+
+    def test_cannot_subclass(self):
+        with self.assertRaises(TypeError) as ex:
+
+            class A(typing.Match):
+                pass
+
+        self.assertEqual(str(ex.exception),
+                         "Cannot subclass typing._TypeAlias")
+
+
+class AllTests(BaseTestCase):
+    """Tests for __all__."""
+
+    def test_all(self):
+        from typing import __all__ as a
+        # Just spot-check the first and last of every category.
+        self.assertIn('AbstractSet', a)
+        self.assertIn('ValuesView', a)
+        self.assertIn('cast', a)
+        self.assertIn('overload', a)
+        # ContextManager is only exported when the stdlib provides the ABC.
+        if hasattr(contextlib, 'AbstractContextManager'):
+            self.assertIn('ContextManager', a)
+        # Check that io and re are not exported.
+        self.assertNotIn('io', a)
+        self.assertNotIn('re', a)
+        # Spot-check that stdlib modules aren't exported.
+        self.assertNotIn('os', a)
+        self.assertNotIn('sys', a)
+        # Check that Text is defined.
+        self.assertIn('Text', a)
+
+
+# Allow running this test file directly; `main` is presumably unittest's
+# test runner imported earlier in the file (not visible in this chunk).
+if __name__ == '__main__':
+    main()
diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py
new file mode 100644
index 0000000..34845b7
--- /dev/null
+++ b/lib-typing/3.2/typing.py
@@ -0,0 +1,2160 @@
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import contextlib
+import functools
+import re as stdlib_re  # Avoid confusion with the re we export.
+import sys
+import types
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for PY3.2.
+
+
+# Please keep __all__ alphabetized within each category.
+# (Categories: special primitives, ABCs, protocols, concrete
+# collection types, one-off helpers.)
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'Callable',
+    'ClassVar',
+    'Generic',
+    'Optional',
+    'Tuple',
+    'Type',
+    'TypeVar',
+    'Union',
+
+    # ABCs (from collections.abc).
+    'AbstractSet',  # collections.abc.Set.
+    'ByteString',
+    'Container',
+    'Hashable',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'Mapping',
+    'MappingView',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'Sequence',
+    'Sized',
+    'ValuesView',
+    # The following are added depending on presence
+    # of their non-generic counterparts in stdlib:
+    # Awaitable,
+    # AsyncIterator,
+    # AsyncIterable,
+    # Coroutine,
+    # Collection,
+    # ContextManager
+
+    # Structural checks, a.k.a. protocols.
+    'Reversible',
+    'SupportsAbs',
+    'SupportsFloat',
+    'SupportsInt',
+    'SupportsRound',
+
+    # Concrete collection types.
+    'Dict',
+    'DefaultDict',
+    'List',
+    'Set',
+    'FrozenSet',
+    'NamedTuple',  # Not really a type.
+    'Generator',
+
+    # One-off things.
+    'AnyStr',
+    'cast',
+    'get_type_hints',
+    'NewType',
+    'no_type_check',
+    'no_type_check_decorator',
+    'overload',
+    'Text',
+    'TYPE_CHECKING',
+]
+
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
+
+def _qualname(x):
+    # Internal helper: __qualname__ only exists on Python 3.3+; on
+    # older versions fall back to the (possibly ambiguous) __name__.
+    if sys.version_info[:2] >= (3, 3):
+        return x.__qualname__
+    else:
+        # Fall back to just name.
+        return x.__name__
+
+
+def _trim_name(nm):
+    # Internal helper: strip the leading underscore from implementation
+    # class names (e.g. '_Union' -> 'Union') for nicer reprs, except for
+    # the few helpers whose underscored name *is* the displayed name.
+    if nm.startswith('_') and nm not in ('_TypeAlias',
+                    '_ForwardRef', '_TypingBase', '_FinalTypingBase'):
+        nm = nm[1:]
+    return nm
+
+
+class TypingMeta(type):
+    """Metaclass for most types defined in typing module
+    (not a part of public API).
+
+    This overrides __new__() to require an extra keyword parameter
+    '_root', which serves as a guard against naive subclassing of the
+    typing classes.  Any legitimate class defined using a metaclass
+    derived from TypingMeta must pass _root=True.
+
+    This also defines a dummy constructor (all the work for most typing
+    constructs is done in __new__) and a nicer repr().
+    """
+
+    # Default; classes with protocol semantics presumably override this
+    # elsewhere in the module -- TODO confirm against _Protocol.
+    _is_protocol = False
+
+    def __new__(cls, name, bases, namespace, *, _root=False):
+        # Reject user subclassing: only internal class statements that
+        # pass _root=True are allowed through.
+        if not _root:
+            raise TypeError("Cannot subclass %s" %
+                            (', '.join(map(_type_repr, bases)) or '()'))
+        return super().__new__(cls, name, bases, namespace)
+
+    def __init__(self, *args, **kwds):
+        # No-op: all the work happens in __new__.
+        pass
+
+    def _eval_type(self, globalns, localns):
+        """Override this in subclasses to interpret forward references.
+
+        For example, List['C'] is internally stored as
+        List[_ForwardRef('C')], which should evaluate to List[C],
+        where C is an object found in globalns or localns (searching
+        localns first, of course).
+        """
+        return self
+
+    def _get_type_vars(self, tvars):
+        # Default: contributes no type variables.
+        pass
+
+    def __repr__(self):
+        # e.g. 'typing.Union' (implementation underscore trimmed).
+        qname = _trim_name(_qualname(self))
+        return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(metaclass=TypingMeta, _root=True):
+    """Internal indicator of special typing constructs."""
+
+    __slots__ = ()
+
+    def __init__(self, *args, **kwds):
+        # No-op: subclasses that need state set it in their own
+        # __init__/__new__.
+        pass
+
+    def __new__(cls, *args, **kwds):
+        """Constructor.
+
+        This only exists to give a better error message in case
+        someone tries to subclass a special typing object (not a good idea).
+        """
+        # A (name, bases, namespace) triple means we are being invoked
+        # as a metaclass-style base in a class statement.
+        if (len(args) == 3 and
+                isinstance(args[0], str) and
+                isinstance(args[1], tuple)):
+            # Close enough.
+            raise TypeError("Cannot subclass %r" % cls)
+        return super().__new__(cls)
+
+    # Things that are not classes also need these.
+    def _eval_type(self, globalns, localns):
+        return self
+
+    def _get_type_vars(self, tvars):
+        pass
+
+    def __repr__(self):
+        cls = type(self)
+        qname = _trim_name(_qualname(cls))
+        return '%s.%s' % (cls.__module__, qname)
+
+    def __call__(self, *args, **kwds):
+        # Special forms (Any, Union, ...) are not instantiable.
+        raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase, _root=True):
+    """Internal mix-in class to prevent instantiation.
+
+    Prevents instantiation unless _root=True is given in class call.
+    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, _root=False, **kwds):
+        self = super().__new__(cls, *args, **kwds)
+        if _root is True:
+            return self
+        raise TypeError("Cannot instantiate %r" % cls)
+
+    def __reduce__(self):
+        # Pickle as a bare global name (e.g. 'Union'), so unpickling
+        # yields the module-level singleton rather than a new instance.
+        return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase, _root=True):
+    """Internal wrapper to hold a forward reference."""
+
+    __slots__ = ('__forward_arg__', '__forward_code__',
+                 '__forward_evaluated__', '__forward_value__')
+
+    def __init__(self, arg):
+        super().__init__(arg)
+        if not isinstance(arg, str):
+            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+        # Compiling eagerly both validates that the string is a single
+        # expression and avoids recompiling on every evaluation.
+        try:
+            code = compile(arg, '<string>', 'eval')
+        except SyntaxError:
+            raise SyntaxError('Forward reference must be an expression -- got %r' %
+                              (arg,))
+        self.__forward_arg__ = arg
+        self.__forward_code__ = code
+        self.__forward_evaluated__ = False
+        self.__forward_value__ = None
+
+    def _eval_type(self, globalns, localns):
+        # Cache the result, but re-evaluate when distinct local and
+        # global namespaces are supplied.
+        if not self.__forward_evaluated__ or localns is not globalns:
+            if globalns is None and localns is None:
+                globalns = localns = {}
+            elif globalns is None:
+                globalns = localns
+            elif localns is None:
+                localns = globalns
+            self.__forward_value__ = _type_check(
+                eval(self.__forward_code__, globalns, localns),
+                "Forward references must evaluate to types.")
+            self.__forward_evaluated__ = True
+        return self.__forward_value__
+
+    def __eq__(self, other):
+        if not isinstance(other, _ForwardRef):
+            return NotImplemented
+        return (self.__forward_arg__ == other.__forward_arg__ and
+                self.__forward_value__ == other.__forward_value__)
+
+    def __hash__(self):
+        # Must stay consistent with __eq__ above.
+        return hash((self.__forward_arg__, self.__forward_value__))
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Forward references cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Forward references cannot be used with issubclass().")
+
+    def __repr__(self):
+        return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias(_TypingBase, _root=True):
+    """Internal helper class for defining generic variants of concrete types.
+
+    Note that this is not a type; let's call it a pseudo-type.  It cannot
+    be used in instance and subclass checks in parameterized form, i.e.
+    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+    ``False``.
+    """
+
+    __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+    def __init__(self, name, type_var, impl_type, type_checker):
+        """Initializer.
+
+        Args:
+            name: The name, e.g. 'Pattern'.
+            type_var: The type parameter, e.g. AnyStr, or the
+                specific type, e.g. str.
+            impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance.
+                and returns a value that should be a type_var instance.
+        """
+        assert isinstance(name, str), repr(name)
+        assert isinstance(impl_type, type), repr(impl_type)
+        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+        self.name = name
+        self.type_var = type_var
+        self.impl_type = impl_type
+        self.type_checker = type_checker
+
+    def __repr__(self):
+        return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+    def __getitem__(self, parameter):
+        # Only the unparameterized form (type_var is still a TypeVar)
+        # can be subscripted, and the substitution must respect the
+        # TypeVar's constraints (e.g. AnyStr -> str or bytes only).
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("%s cannot be further parameterized." % self)
+        if self.type_var.__constraints__ and isinstance(parameter, type):
+            if not issubclass(parameter, self.type_var.__constraints__):
+                raise TypeError("%s is not a valid substitution for %s." %
+                                (parameter, self.type_var))
+        if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+            raise TypeError("%s cannot be re-parameterized." % self)
+        return self.__class__(self.name, parameter,
+                              self.impl_type, self.type_checker)
+
+    def __eq__(self, other):
+        if not isinstance(other, _TypeAlias):
+            return NotImplemented
+        return self.name == other.name and self.type_var == other.type_var
+
+    def __hash__(self):
+        # Must stay consistent with __eq__ above.
+        return hash((self.name, self.type_var))
+
+    def __instancecheck__(self, obj):
+        # Only the unparameterized alias supports isinstance(); it
+        # simply checks against the implementation type.
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with isinstance().")
+        return isinstance(obj, self.impl_type)
+
+    def __subclasscheck__(self, cls):
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("Parameterized type aliases cannot be used "
+                            "with issubclass().")
+        return issubclass(cls, self.impl_type)
+
+
+def _get_type_vars(types, tvars):
+    # Internal helper: accumulate, into the list `tvars`, the type
+    # variables appearing in `types` (delegates to each typing object).
+    for t in types:
+        if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+            t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+    # Internal helper: return the type variables of `types` as a tuple.
+    tvars = []
+    _get_type_vars(types, tvars)
+    return tuple(tvars)
+
+
+def _eval_type(t, globalns, localns):
+    # Internal helper: resolve forward references inside t, if any;
+    # plain (non-typing) objects pass through unchanged.
+    if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+        return t._eval_type(globalns, localns)
+    return t
+
+
+def _type_check(arg, msg):
+    """Check that the argument is a type, and return it (internal helper).
+
+    As a special case, accept None and return type(None) instead.
+    Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+    The msg argument is a human-readable error message, e.g.
+
+        "Union[arg, ...]: arg should be a type."
+
+    We append the repr() of the actual value (truncated to 100 chars).
+    """
+    if arg is None:
+        return type(None)
+    if isinstance(arg, str):
+        # Bare strings become forward references.
+        arg = _ForwardRef(arg)
+    # Reject ClassVar in a type-argument position, and anything that is
+    # neither a type, a typing construct, nor callable.
+    if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+        not isinstance(arg, (type, _TypingBase)) and not callable(arg)):
+        raise TypeError(msg + " Got %.100r." % (arg,))
+    # Bare Union etc. are not valid as type arguments
+    if (type(arg).__name__ in ('_Union', '_Optional')
+        and not getattr(arg, '__origin__', None)
+        or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)):
+        raise TypeError("Plain %s is not valid as type argument" % arg)
+    return arg
+
+
+def _type_repr(obj):
+    """Return the repr() of an object, special-casing types (internal helper).
+
+    If obj is a type, we return a shorter version than the default
+    type.__repr__, based on the module and qualified name, which is
+    typically enough to uniquely identify a type.  For everything
+    else, we fall back on repr(obj).
+    """
+    if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+        if obj.__module__ == 'builtins':
+            return _qualname(obj)
+        return '%s.%s' % (obj.__module__, _qualname(obj))
+    # Ellipsis, as used in e.g. Callable[..., int].
+    if obj is ...:
+        return('...')
+    if isinstance(obj, types.FunctionType):
+        return obj.__name__
+    return repr(obj)
+
+
+class _Any(_FinalTypingBase, _root=True):
+    """Special type indicating an unconstrained type.
+
+    - Any is compatible with every type.
+    - Any assumed to have all methods.
+    - All values assumed to be instances of Any.
+
+    Note that all the above statements are true from the point of view of
+    static type checkers. At runtime, Any should not be used with instance
+    or class checks.
+    """
+
+    __slots__ = ()
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Any cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Any cannot be used with issubclass().")
+
+
+# The one and only instance, exported as typing.Any.
+Any = _Any(_root=True)
+
+
+class TypeVar(_TypingBase, _root=True):
+    """Type variable.
+
+    Usage::
+
+      T = TypeVar('T')  # Can be anything
+      A = TypeVar('A', str, bytes)  # Must be str or bytes
+
+    Type variables exist primarily for the benefit of static type
+    checkers.  They serve as the parameters for generic types as well
+    as for generic function definitions.  See class Generic for more
+    information on generic types.  Generic functions work as follows:
+
+      def repeat(x: T, n: int) -> List[T]:
+          '''Return a list containing n references to x.'''
+          return [x]*n
+
+      def longest(x: A, y: A) -> A:
+          '''Return the longest of two strings.'''
+          return x if len(x) >= len(y) else y
+
+    The latter example's signature is essentially the overloading
+    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
+    that if the arguments are instances of some subclass of str,
+    the return type is still plain str.
+
+    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
+
+    Type variables defined with covariant=True or contravariant=True
+    can be used to declare covariant or contravariant generic types.
+    See PEP 484 for more details. By default generic types are invariant
+    in all type variables.
+
+    Type variables can be introspected. e.g.:
+
+      T.__name__ == 'T'
+      T.__constraints__ == ()
+      T.__covariant__ == False
+      T.__contravariant__ == False
+      A.__constraints__ == (str, bytes)
+    """
+
+    __slots__ = ('__name__', '__bound__', '__constraints__',
+                 '__covariant__', '__contravariant__')
+
+    def __init__(self, name, *constraints, bound=None,
+                covariant=False, contravariant=False):
+        super().__init__(name, *constraints, bound=bound,
+                         covariant=covariant, contravariant=contravariant)
+        self.__name__ = name
+        if covariant and contravariant:
+            raise ValueError("Bivariant types are not supported.")
+        self.__covariant__ = bool(covariant)
+        self.__contravariant__ = bool(contravariant)
+        # Constraints and bound are mutually exclusive, and a single
+        # constraint makes no sense (use bound= for that).
+        if constraints and bound is not None:
+            raise TypeError("Constraints cannot be combined with bound=...")
+        if constraints and len(constraints) == 1:
+            raise TypeError("A single constraint is not allowed")
+        msg = "TypeVar(name, constraint, ...): constraints must be types."
+        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+        if bound:
+            self.__bound__ = _type_check(bound, "Bound must be a type.")
+        else:
+            self.__bound__ = None
+
+    def _get_type_vars(self, tvars):
+        # A TypeVar contributes itself (once) to the collected set.
+        if self not in tvars:
+            tvars.append(self)
+
+    def __repr__(self):
+        # Prefix encodes variance: '+' covariant, '-' contravariant,
+        # '~' invariant.
+        if self.__covariant__:
+            prefix = '+'
+        elif self.__contravariant__:
+            prefix = '-'
+        else:
+            prefix = '~'
+        return prefix + self.__name__
+
+    def __instancecheck__(self, instance):
+        raise TypeError("Type variables cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Type variables cannot be used with issubclass().")
+
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = TypeVar('T')  # Any type.
+KT = TypeVar('KT')  # Key type.
+VT = TypeVar('VT')  # Value type.
+T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
+# NOTE(review): V_co duplicates T_co's declaration; presumably kept as a
+# distinct name for existing users -- confirm before removing.
+V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+# A useful type variable with constraints.  This represents string types.
+# (This one *is* for export!)
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+
+def _replace_arg(arg, tvars, args):
+    """An internal helper function: replace arg if it is a type variable
+    found in tvars with corresponding substitution from args or
+    with corresponding substitution sub-tree if arg is a generic type.
+    """
+
+    if tvars is None:
+        tvars = []
+    # Generic classes and special forms know how to substitute into
+    # themselves via _subs_tree.
+    if hasattr(arg, '_subs_tree'):
+        return arg._subs_tree(tvars, args)
+    if isinstance(arg, TypeVar):
+        for i, tvar in enumerate(tvars):
+            if arg == tvar:
+                return args[i]
+    # Not a type variable we know about: keep as-is.
+    return arg
+
+
+def _subs_tree(cls, tvars=None, args=None):
+    """An internal helper function: calculate substitution tree
+    for generic cls after replacing its type parameters with
+    substitutions in tvars -> args (if any).
+    Repeat the same following __origin__'s.
+
+    Return a list of arguments with all possible substitutions
+    performed. Arguments that are generic classes themselves are represented
+    as tuples (so that no new classes are created by this function).
+    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+    """
+
+    # An unsubscripted class has nothing to substitute.
+    if cls.__origin__ is None:
+        return cls
+    # Make of chain of origins (i.e. cls -> cls.__origin__)
+    current = cls.__origin__
+    orig_chain = []
+    while current.__origin__ is not None:
+        orig_chain.append(current)
+        current = current.__origin__
+    # Replace type variables in __args__ if asked ...
+    tree_args = []
+    for arg in cls.__args__:
+        tree_args.append(_replace_arg(arg, tvars, args))
+    # ... then continue replacing down the origin chain.
+    for ocls in orig_chain:
+        new_tree_args = []
+        for i, arg in enumerate(ocls.__args__):
+            new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+        tree_args = new_tree_args
+    return tree_args
+
+
+def _remove_dups_flatten(parameters):
+    """An internal helper for Union creation and substitution: flatten Union's
+    among parameters, then remove duplicates and strict subclasses.
+    """
+
+    # Flatten out Union[Union[...], ...].
+    params = []
+    for p in parameters:
+        if isinstance(p, _Union) and p.__origin__ is Union:
+            params.extend(p.__args__)
+        # Substitution trees represent a Union as a (Union, arg, ...)
+        # tuple; flatten those too.
+        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+            params.extend(p[1:])
+        else:
+            params.append(p)
+    # Weed out strict duplicates, preserving the first of each occurrence.
+    all_params = set(params)
+    if len(all_params) < len(params):
+        new_params = []
+        for t in params:
+            if t in all_params:
+                new_params.append(t)
+                all_params.remove(t)
+        params = new_params
+        assert not all_params, all_params
+    # Weed out subclasses.
+    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+    # If object is present it will be sole survivor among proper classes.
+    # Never discard type variables.
+    # (In particular, Union[str, AnyStr] != AnyStr.)
+    all_params = set(params)
+    for t1 in params:
+        if not isinstance(t1, type):
+            continue
+        if any(isinstance(t2, type) and issubclass(t1, t2)
+               for t2 in all_params - {t1}
+               if not (isinstance(t2, GenericMeta) and
+                       t2.__origin__ is not None)):
+            all_params.remove(t1)
+    # Keep the original (first-occurrence) ordering of the survivors.
+    return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+    # Check correct count for parameters of a generic cls (internal helper).
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+# cache_clear callbacks for every @_tp_cache cache, registered so the
+# caches can all be cleared later.
+_cleanups = []
+
+
+def _tp_cache(func):
+    """Internal wrapper caching __getitem__ of generic types with a fallback to
+    original function for non-hashable arguments.
+    """
+
+    cached = functools.lru_cache()(func)
+    _cleanups.append(cached.cache_clear)
+    @functools.wraps(func)
+    def inner(*args, **kwds):
+        # lru_cache raises TypeError for unhashable arguments; in that
+        # case fall through to the uncached function, which will raise
+        # the real error (if any) itself.
+        try:
+            return cached(*args, **kwds)
+        except TypeError:
+            pass  # All real errors (not unhashable args) are raised below.
+        return func(*args, **kwds)
+    return inner
+
+
+class _Union(_FinalTypingBase, _root=True):
+    """Union type; Union[X, Y] means either X or Y.
+
+    To define a union, use e.g. Union[int, str].  Details:
+
+    - The arguments must be types and there must be at least one.
+
+    - None as an argument is a special case and is replaced by
+      type(None).
+
+    - Unions of unions are flattened, e.g.::
+
+        Union[Union[int, str], float] == Union[int, str, float]
+
+    - Unions of a single argument vanish, e.g.::
+
+        Union[int] == int  # The constructor actually returns int
+
+    - Redundant arguments are skipped, e.g.::
+
+        Union[int, str, int] == Union[int, str]
+
+    - When comparing unions, the argument order is ignored, e.g.::
+
+        Union[int, str] == Union[str, int]
+
+    - When two arguments have a subclass relationship, the least
+      derived argument is kept, e.g.::
+
+        class Employee: pass
+        class Manager(Employee): pass
+        Union[int, Employee, Manager] == Union[int, Employee]
+        Union[Manager, int, Employee] == Union[int, Employee]
+        Union[Employee, Manager] == Employee
+
+    - Similar for object::
+
+        Union[int, object] == object
+
+    - You cannot subclass or instantiate a union.
+
+    - You can use Optional[X] as a shorthand for Union[X, None].
+    """
+
+    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+    def __new__(cls, parameters=None, origin=None, *args, _root=False):
+        self = super().__new__(cls, parameters, origin, *args, _root=_root)
+        # origin is None only for the unsubscripted Union singleton.
+        if origin is None:
+            self.__parameters__ = None
+            self.__args__ = None
+            self.__origin__ = None
+            self.__tree_hash__ = hash(frozenset(('Union',)))
+            return self
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        if origin is Union:
+            parameters = _remove_dups_flatten(parameters)
+            # It's not a union if there's only one type left.
+            if len(parameters) == 1:
+                return parameters[0]
+        self.__parameters__ = _type_vars(parameters)
+        self.__args__ = parameters
+        self.__origin__ = origin
+        # Pre-calculate the __hash__ on instantiation.
+        # This improves speed for complex substitutions.
+        subs_tree = self._subs_tree()
+        if isinstance(subs_tree, tuple):
+            self.__tree_hash__ = hash(frozenset(subs_tree))
+        else:
+            self.__tree_hash__ = hash(subs_tree)
+        return self
+
+    def _eval_type(self, globalns, localns):
+        if self.__args__ is None:
+            return self
+        ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+        ev_origin = _eval_type(self.__origin__, globalns, localns)
+        if ev_args == self.__args__ and ev_origin == self.__origin__:
+            # Everything is already evaluated.
+            return self
+        return self.__class__(ev_args, ev_origin, _root=True)
+
+    def _get_type_vars(self, tvars):
+        if self.__origin__ and self.__parameters__:
+            _get_type_vars(self.__parameters__, tvars)
+
+    def __repr__(self):
+        if self.__origin__ is None:
+            return super().__repr__()
+        # Render from the substitution tree so nested substitutions
+        # display fully resolved.
+        tree = self._subs_tree()
+        if not isinstance(tree, tuple):
+            return repr(tree)
+        return tree[0]._tree_repr(tree)
+
+    def _tree_repr(self, tree):
+        arg_list = []
+        for arg in tree[1:]:
+            if not isinstance(arg, tuple):
+                arg_list.append(_type_repr(arg))
+            else:
+                arg_list.append(arg[0]._tree_repr(arg))
+        return super().__repr__() + '[%s]' % ', '.join(arg_list)
+
+    @_tp_cache
+    def __getitem__(self, parameters):
+        if parameters == ():
+            raise TypeError("Cannot take a Union of no types.")
+        if not isinstance(parameters, tuple):
+            parameters = (parameters,)
+        if self.__origin__ is None:
+            msg = "Union[arg, ...]: each arg must be a type."
+        else:
+            msg = "Parameters to generic types must be types."
+        parameters = tuple(_type_check(p, msg) for p in parameters)
+        # Subscripting an already-parameterized Union re-parameterizes
+        # it, so the parameter count must match its type variables.
+        if self is not Union:
+            _check_generic(self, parameters)
+        return self.__class__(parameters, origin=self, _root=True)
+
+    def _subs_tree(self, tvars=None, args=None):
+        if self is Union:
+            return Union  # Nothing to substitute
+        tree_args = _subs_tree(self, tvars, args)
+        tree_args = _remove_dups_flatten(tree_args)
+        if len(tree_args) == 1:
+            return tree_args[0]  # Union of a single type is that type
+        return (Union,) + tree_args
+
+    def __eq__(self, other):
+        # Compare via the precomputed tree hash when possible; against
+        # non-unions compare the fully substituted tree.
+        if not isinstance(other, _Union):
+            return self._subs_tree() == other
+        return self.__tree_hash__ == other.__tree_hash__
+
+    def __hash__(self):
+        return self.__tree_hash__
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Unions cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Unions cannot be used with issubclass().")
+
+
+# The unsubscripted singleton, exported as typing.Union.
+Union = _Union(_root=True)
+
+
+class _Optional(_FinalTypingBase, _root=True):
+    """Optional type.
+
+    Optional[X] is equivalent to Union[X, None].
+    """
+
+    __slots__ = ()
+
+    @_tp_cache
+    def __getitem__(self, arg):
+        # Pure sugar: delegate to Union with type(None) appended.
+        arg = _type_check(arg, "Optional[t] requires a single type.")
+        return Union[arg, type(None)]
+
+
+# The singleton, exported as typing.Optional.
+Optional = _Optional(_root=True)
+
+
+def _gorg(a):
+    """Return the farthest origin of a generic class (internal helper)."""
+    assert isinstance(a, GenericMeta)
+    # Follow the __origin__ chain to the original unsubscripted class.
+    while a.__origin__ is not None:
+        a = a.__origin__
+    return a
+
+
+def _geqv(a, b):
+    """Return whether two generic classes are equivalent (internal helper).
+
+    The intention is to consider generic class X and any of its
+    parameterized forms (X[T], X[int], etc.) as equivalent.
+
+    However, X is not equivalent to a subclass of X.
+
+    The relation is reflexive, symmetric and transitive.
+    """
+    assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
+    # Reduce each to its origin.
+    return _gorg(a) is _gorg(b)
+
+
+def _next_in_mro(cls):
+    """Helper for Generic.__new__.
+
+    Returns the class after the last occurrence of Generic or
+    Generic[...] in cls.__mro__.
+    """
+    next_in_mro = object
+    # Look for the last occurrence of Generic or Generic[...].
+    # (cls.__mro__[:-1] skips object, so i+1 is always in range.)
+    for i, c in enumerate(cls.__mro__[:-1]):
+        if isinstance(c, GenericMeta) and _gorg(c) is Generic:
+            next_in_mro = cls.__mro__[i+1]
+    return next_in_mro
+
+
+def _valid_for_check(cls):
+    """An internal helper to prohibit isinstance([1], List[str]) etc."""
+    if cls is Generic:
+        raise TypeError("Class %r cannot be used with class "
+                        "or instance checks" % cls)
+    # Frame hack: allow parameterized checks only when originating from
+    # the abc/functools machinery (three frames up is the real caller).
+    if (cls.__origin__ is not None and
+        sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']):
+        raise TypeError("Parameterized generics cannot be used with class "
+                        "or instance checks")
+
+
+def _make_subclasshook(cls):
+    """Construct a __subclasshook__ callable that incorporates
+    the associated __extra__ class in subclass checks performed
+    against cls.
+    """
+    if isinstance(cls.__extra__, abc.ABCMeta):
+        # The logic mirrors that of ABCMeta.__subclasscheck__.
+        # Registered classes need not be checked here because
+        # cls and its extra share the same _abc_registry.
+        def __extrahook__(subclass):
+            _valid_for_check(cls)
+            res = cls.__extra__.__subclasshook__(subclass)
+            if res is not NotImplemented:
+                return res
+            if cls.__extra__ in subclass.__mro__:
+                return True
+            # Check subclasses of the extra, skipping typing's own
+            # generic wrappers to avoid recursing into them.
+            for scls in cls.__extra__.__subclasses__():
+                if isinstance(scls, GenericMeta):
+                    continue
+                if issubclass(subclass, scls):
+                    return True
+            return NotImplemented
+    else:
+        # For non-ABC extras we'll just call issubclass().
+        def __extrahook__(subclass):
+            _valid_for_check(cls)
+            if cls.__extra__ and issubclass(subclass, cls.__extra__):
+                return True
+            return NotImplemented
+    return __extrahook__
+
+
+def _no_slots_copy(dct):
+    """Internal helper: copy class __dict__ and clean slots class variables.
+    (They will be re-created if necessary by normal class machinery.)
+    """
+    dict_copy = dict(dct)
+    if '__slots__' in dict_copy:
+        # Drop the slot descriptors themselves, not the __slots__ entry.
+        for slot in dict_copy['__slots__']:
+            dict_copy.pop(slot, None)
+    return dict_copy
+
+
class GenericMeta(TypingMeta, abc.ABCMeta):
    """Metaclass for generic types.

    Instances of this metaclass are created in two distinct ways:
    from a ``class`` statement (tvars/args/origin all None) or from
    ``__getitem__`` below (all of them supplied).
    """

    def __new__(cls, name, bases, namespace,
                tvars=None, args=None, origin=None, extra=None, orig_bases=None):
        if tvars is not None:
            # Called from __getitem__() below.
            assert origin is not None
            assert all(isinstance(t, TypeVar) for t in tvars), tvars
        else:
            # Called from class statement.
            assert tvars is None, tvars
            assert args is None, args
            assert origin is None, origin

            # Get the full set of tvars from the bases.
            tvars = _type_vars(bases)
            # Look for Generic[T1, ..., Tn].
            # If found, tvars must be a subset of it.
            # If not found, tvars is it.
            # Also check for and reject plain Generic,
            # and reject multiple Generic[...].
            gvars = None
            for base in bases:
                if base is Generic:
                    raise TypeError("Cannot inherit from plain Generic")
                if (isinstance(base, GenericMeta) and
                        base.__origin__ is Generic):
                    if gvars is not None:
                        raise TypeError(
                            "Cannot inherit from Generic[...] multiple types.")
                    gvars = base.__parameters__
            if gvars is None:
                gvars = tvars
            else:
                # Every type variable used in the bases must be declared
                # in the Generic[...] base, in its declared order.
                tvarset = set(tvars)
                gvarset = set(gvars)
                if not tvarset <= gvarset:
                    raise TypeError(
                        "Some type variables (%s) "
                        "are not listed in Generic[%s]" %
                        (", ".join(str(t) for t in tvars if t not in gvarset),
                         ", ".join(str(g) for g in gvars)))
                tvars = gvars

        initial_bases = bases
        # Insert the concrete "extra" ABC (e.g. collections.abc.Iterable)
        # into the bases so issubclass/isinstance behave like the ABC's.
        if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
            bases = (extra,) + bases
        bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)

        # remove bare Generic from bases if there are other generic bases
        if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
            bases = tuple(b for b in bases if b is not Generic)
        self = super().__new__(cls, name, bases, namespace, _root=True)

        self.__parameters__ = tvars
        # Be prepared that GenericMeta will be subclassed by TupleMeta
        # and CallableMeta, those two allow ..., (), or [] in __args___.
        self.__args__ = tuple(... if a is _TypingEllipsis else
                              () if a is _TypingEmpty else
                              a for a in args) if args else None
        self.__origin__ = origin
        self.__extra__ = extra
        # Speed hack (https://github.com/python/typing/issues/196).
        self.__next_in_mro__ = _next_in_mro(self)
        # Preserve base classes on subclassing (__bases__ are type erased now).
        if orig_bases is None:
            self.__orig_bases__ = initial_bases

        # This allows unparameterized generic collections to be used
        # with issubclass() and isinstance() in the same way as their
        # collections.abc counterparts (e.g., isinstance([], Iterable)).
        if ('__subclasshook__' not in namespace and extra  # allow overriding
            or hasattr(self.__subclasshook__, '__name__') and
            self.__subclasshook__.__name__ == '__extrahook__'):
            self.__subclasshook__ = _make_subclasshook(self)
        if isinstance(extra, abc.ABCMeta):
            # Share the ABC registry so register() on the extra ABC is seen.
            self._abc_registry = extra._abc_registry

        if origin and hasattr(origin, '__qualname__'):  # Fix for Python 3.2.
            self.__qualname__ = origin.__qualname__
        # Cache the hash: parameterized forms hash by their substitution
        # tree, unsubscripted classes by name only.
        self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,))
        return self

    def _get_type_vars(self, tvars):
        # Collect this type's free type variables into tvars (internal).
        if self.__origin__ and self.__parameters__:
            _get_type_vars(self.__parameters__, tvars)

    def _eval_type(self, globalns, localns):
        # Resolve forward references in origin and args; return self
        # unchanged when nothing needed evaluation.
        ev_origin = (self.__origin__._eval_type(globalns, localns)
                     if self.__origin__ else None)
        ev_args = tuple(_eval_type(a, globalns, localns) for a
                        in self.__args__) if self.__args__ else None
        if ev_origin == self.__origin__ and ev_args == self.__args__:
            return self
        return self.__class__(self.__name__,
                              self.__bases__,
                              _no_slots_copy(self.__dict__),
                              tvars=_type_vars(ev_args) if ev_args else None,
                              args=ev_args,
                              origin=ev_origin,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __repr__(self):
        if self.__origin__ is None:
            return super().__repr__()
        return self._tree_repr(self._subs_tree())

    def _tree_repr(self, tree):
        # Render a substitution tree (as built by _subs_tree) as
        # "origin[arg, ...]"; nested tuples are sub-trees.
        arg_list = []
        for arg in tree[1:]:
            if arg == ():
                arg_list.append('()')
            elif not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        return super().__repr__() + '[%s]' % ', '.join(arg_list)

    def _subs_tree(self, tvars=None, args=None):
        # Return self for unsubscripted classes, otherwise a tuple
        # (generic-origin, subs-tree-of-arg, ...) used for repr/hash/eq.
        if self.__origin__ is None:
            return self
        tree_args = _subs_tree(self, tvars, args)
        return (_gorg(self),) + tuple(tree_args)

    def __eq__(self, other):
        if not isinstance(other, GenericMeta):
            return NotImplemented
        if self.__origin__ is None or other.__origin__ is None:
            # Unsubscripted classes compare by identity only.
            return self is other
        return self.__tree_hash__ == other.__tree_hash__

    def __hash__(self):
        return self.__tree_hash__

    @_tp_cache
    def __getitem__(self, params):
        if not isinstance(params, tuple):
            params = (params,)
        if not params and not _gorg(self) is Tuple:
            raise TypeError(
                "Parameter list to %s[...] cannot be empty" % _qualname(self))
        msg = "Parameters to generic types must be types."
        params = tuple(_type_check(p, msg) for p in params)
        if self is Generic:
            # Generic can only be subscripted with unique type variables.
            if not all(isinstance(p, TypeVar) for p in params):
                raise TypeError(
                    "Parameters to Generic[...] must all be type variables")
            if len(set(params)) != len(params):
                raise TypeError(
                    "Parameters to Generic[...] must all be unique")
            tvars = params
            args = params
        elif self in (Tuple, Callable):
            tvars = _type_vars(params)
            args = params
        elif self is _Protocol:
            # _Protocol is internal, don't check anything.
            tvars = params
            args = params
        elif self.__origin__ in (Generic, _Protocol):
            # Can't subscript Generic[...] or _Protocol[...].
            raise TypeError("Cannot subscript already-subscripted %s" %
                            repr(self))
        else:
            # Subscripting a regular Generic subclass.
            _check_generic(self, params)
            tvars = _type_vars(params)
            args = params
        return self.__class__(self.__name__,
                              self.__bases__,
                              _no_slots_copy(self.__dict__),
                              tvars=tvars,
                              args=args,
                              origin=self,
                              extra=self.__extra__,
                              orig_bases=self.__orig_bases__)

    def __instancecheck__(self, instance):
        # Since we extend ABC.__subclasscheck__ and
        # ABC.__instancecheck__ inlines the cache checking done by the
        # latter, we must extend __instancecheck__ too. For simplicity
        # we just skip the cache check -- instance checks for generic
        # classes are supposed to be rare anyways.
        return issubclass(instance.__class__, self)

    def __copy__(self):
        # Rebuild rather than share: __dict__ is copied with slot
        # variables stripped so class machinery recreates them.
        return self.__class__(self.__name__, self.__bases__,
                              _no_slots_copy(self.__dict__),
                              self.__parameters__, self.__args__, self.__origin__,
                              self.__extra__, self.__orig_bases__)
+
+
# Prevent checks for Generic to crash when defining Generic.
# GenericMeta.__new__ above tests "base is Generic"; binding the name to
# None here lets that comparison run while the real Generic class (defined
# a few lines below) is being created.
Generic = None
+
+
+def _generic_new(base_cls, cls, *args, **kwds):
+    # Assure type is erased on instantiation,
+    # but attempt to store it in __orig_class__
+    if cls.__origin__ is None:
+        return base_cls.__new__(cls)
+    else:
+        origin = _gorg(cls)
+        obj = base_cls.__new__(origin)
+        try:
+            obj.__orig_class__ = cls
+        except AttributeError:
+            pass
+        obj.__init__(*args, **kwds)
+        return obj
+
+
class Generic(metaclass=GenericMeta):
    """Abstract base class for generic types.

    A generic type is typically declared by inheriting from
    this class parameterized with one or more type variables.
    For example, a generic mapping type might be defined as::

      class Mapping(Generic[KT, VT]):
          def __getitem__(self, key: KT) -> VT:
              ...
          # Etc.

    This class can then be used as follows::

      def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
          try:
              return mapping[key]
          except KeyError:
              return default
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Refuse to instantiate Generic itself (or its parameterized
        # forms -- _geqv compares erased origins); subclasses are fine.
        if _geqv(cls, Generic):
            raise TypeError("Type Generic cannot be instantiated; "
                            "it can be used only as a base class")
        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
class _TypingEmpty:
    """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
    to allow empty list/tuple in specific places, without allowing them
    to sneak in where prohibited.
    """
+
+
class _TypingEllipsis:
    """Internal placeholder for ... (ellipsis)."""
+
+
class TupleMeta(GenericMeta):
    """Metaclass for Tuple (internal).

    Handles the special subscription forms Tuple[()] (empty tuple) and
    Tuple[t, ...] (homogeneous variable-length tuple), and forbids
    isinstance()/issubclass() on parameterized forms.
    """

    @_tp_cache
    def __getitem__(self, parameters):
        if self.__origin__ is not None or not _geqv(self, Tuple):
            # Normal generic rules apply if this is not the first subscription
            # or a subscription of a subclass.
            return super().__getitem__(parameters)
        if parameters == ():
            # Tuple[()] -- the type of the empty tuple.
            return super().__getitem__((_TypingEmpty,))
        if not isinstance(parameters, tuple):
            parameters = (parameters,)
        if len(parameters) == 2 and parameters[1] is ...:
            # Tuple[t, ...] -- homogeneous variable-length tuple.
            msg = "Tuple[t, ...]: t must be a type."
            p = _type_check(parameters[0], msg)
            return super().__getitem__((p, _TypingEllipsis))
        msg = "Tuple[t0, t1, ...]: each t must be a type."
        parameters = tuple(_type_check(p, msg) for p in parameters)
        return super().__getitem__(parameters)

    def __instancecheck__(self, obj):
        # Fixed: compare to None with "is", not "==" (identity check per
        # PEP 8; also avoids calling __eq__ on the __args__ tuple).
        if self.__args__ is None:
            return isinstance(obj, tuple)
        raise TypeError("Parameterized Tuple cannot be used "
                        "with isinstance().")

    def __subclasscheck__(self, cls):
        # Fixed: "is None" instead of "== None", as above.
        if self.__args__ is None:
            return issubclass(cls, tuple)
        raise TypeError("Parameterized Tuple cannot be used "
                        "with issubclass().")
+
+
class Tuple(tuple, extra=tuple, metaclass=TupleMeta):
    """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.

    Example: Tuple[T1, T2] is a tuple of two elements corresponding
    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
    of an int, a float and a string.

    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Tuple is annotation-only; direct instantiation is rejected.
        if _geqv(cls, Tuple):
            raise TypeError("Type Tuple cannot be instantiated; "
                            "use tuple() instead")
        return _generic_new(tuple, cls, *args, **kwds)
+
+
class CallableMeta(GenericMeta):
    """Metaclass for Callable (internal).

    Supports the two-part subscription syntax Callable[[args...], result]
    and Callable[..., result], and renders a matching repr.
    """

    def __repr__(self):
        if self.__origin__ is None:
            return super().__repr__()
        return self._tree_repr(self._subs_tree())

    def _tree_repr(self, tree):
        if _gorg(self) is not Callable:
            return super()._tree_repr(tree)
        # For actual Callable (not its subclass) we override
        # super()._tree_repr() for nice formatting.
        arg_list = []
        for arg in tree[1:]:
            if not isinstance(arg, tuple):
                arg_list.append(_type_repr(arg))
            else:
                arg_list.append(arg[0]._tree_repr(arg))
        if arg_list[0] == '...':
            return repr(tree[0]) + '[..., %s]' % arg_list[1]
        return (repr(tree[0]) +
                '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))

    def __getitem__(self, parameters):
        """A thin wrapper around __getitem_inner__ to provide the latter
        with hashable arguments to improve speed.
        """

        # Fixed: stray double space after "if" (PEP 8 single-space).
        if self.__origin__ is not None or not _geqv(self, Callable):
            return super().__getitem__(parameters)
        if not isinstance(parameters, tuple) or len(parameters) != 2:
            raise TypeError("Callable must be used as "
                            "Callable[[arg, ...], result].")
        args, result = parameters
        if args is Ellipsis:
            parameters = (Ellipsis, result)
        else:
            if not isinstance(args, list):
                raise TypeError("Callable[args, result]: args must be a list."
                                " Got %.100r." % (args,))
            # Convert the (unhashable) list to a tuple so the result can
            # be memoized by @_tp_cache below.
            parameters = (tuple(args), result)
        return self.__getitem_inner__(parameters)

    @_tp_cache
    def __getitem_inner__(self, parameters):
        args, result = parameters
        msg = "Callable[args, result]: result must be a type."
        result = _type_check(result, msg)
        if args is Ellipsis:
            return super().__getitem__((_TypingEllipsis, result))
        msg = "Callable[[arg, ...], result]: each arg must be a type."
        args = tuple(_type_check(arg, msg) for arg in args)
        parameters = args + (result,)
        return super().__getitem__(parameters)
+
+
class Callable(extra=collections_abc.Callable, metaclass=CallableMeta):
    """Callable type; Callable[[int], str] is a function of (int) -> str.

    The subscription syntax must always be used with exactly two
    values: the argument list and the return type.  The argument list
    must be a list of types or ellipsis; the return type must be a single type.

    There is no syntax to indicate optional or keyword arguments,
    such function types are rarely used as callback types.
    """

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Callable is annotation-only; direct instantiation is rejected.
        if _geqv(cls, Callable):
            raise TypeError("Type Callable cannot be instantiated; "
                            "use a non-abstract subclass instead")
        return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
class _ClassVar(_FinalTypingBase, _root=True):
    """Special type construct to mark class variables.

    An annotation wrapped in ClassVar indicates that a given
    attribute is intended to be used as a class variable and
    should not be set on instances of that class. Usage::

      class Starship:
          stats: ClassVar[Dict[str, int]] = {} # class variable
          damage: int = 10                     # instance variable

    ClassVar accepts only types and cannot be further subscribed.

    Note that ClassVar is not a class itself, and should not
    be used with isinstance() or issubclass().
    """

    __slots__ = ('__type__',)

    def __init__(self, tp=None, **kwds):
        self.__type__ = tp

    def __getitem__(self, item):
        cls = type(self)
        # Reject double subscription: ClassVar[X][Y] is meaningless.
        if self.__type__ is not None:
            raise TypeError('{} cannot be further subscripted'
                            .format(cls.__name__[1:]))
        checked = _type_check(
            item, '{} accepts only single type.'.format(cls.__name__[1:]))
        return cls(checked, _root=True)

    def _eval_type(self, globalns, localns):
        # Resolve forward references in the wrapped type.
        evaluated = _eval_type(self.__type__, globalns, localns)
        if evaluated == self.__type__:
            return self
        return type(self)(evaluated, _root=True)

    def __repr__(self):
        text = super().__repr__()
        if self.__type__ is None:
            return text
        return text + '[{}]'.format(_type_repr(self.__type__))

    def __hash__(self):
        return hash((type(self).__name__, self.__type__))

    def __eq__(self, other):
        if not isinstance(other, _ClassVar):
            return NotImplemented
        if self.__type__ is None:
            # The bare ClassVar singleton compares by identity.
            return self is other
        return self.__type__ == other.__type__
+
+
+ClassVar = _ClassVar(_root=True)
+
+
def cast(typ, val):
    """Cast a value to a type.

    This returns the value unchanged.  To the type checker this
    signals that the return value has the designated type, but at
    runtime we intentionally don't check anything (we want this
    to be as fast as possible).
    """
    # Deliberately a no-op at runtime: the annotation effect only
    # exists for static checkers.
    return val
+
+
+def _get_defaults(func):
+    """Internal helper to extract the default arguments, by name."""
+    try:
+        code = func.__code__
+    except AttributeError:
+        # Some built-in functions don't have __code__, __defaults__, etc.
+        return {}
+    pos_count = code.co_argcount
+    arg_names = code.co_varnames
+    arg_names = arg_names[:pos_count]
+    defaults = func.__defaults__ or ()
+    kwdefaults = func.__kwdefaults__
+    res = dict(kwdefaults) if kwdefaults else {}
+    pos_offset = pos_count - len(defaults)
+    for name, value in zip(arg_names[pos_offset:], defaults):
+        assert name not in res
+        res[name] = value
+    return res
+
+
def get_type_hints(obj, globalns=None, localns=None):
    """Return type hints for an object.

    This is often the same as obj.__annotations__, but it handles
    forward references encoded as string literals, and if necessary
    adds Optional[t] if a default value equal to None is set.

    The argument may be a module, class, method, or function. The annotations
    are returned as a dictionary. For classes, annotations include also
    inherited members.

    TypeError is raised if the argument is not of a type that can contain
    annotations, and an empty dictionary is returned if no annotations are
    present.

    BEWARE -- the behavior of globalns and localns is counterintuitive
    (unless you are familiar with how eval() and exec() work).  The
    search order is locals first, then globals.

    - If no dict arguments are passed, an attempt is made to use the
      globals from obj, and these are also used as the locals.  If the
      object does not appear to have globals, an exception is raised.

    - If one dict argument is passed, it is used for both globals and
      locals.

    - If two dict arguments are passed, they specify globals and
      locals, respectively.
    """

    # @no_type_check marks annotations as not being type hints.
    if getattr(obj, '__no_type_check__', None):
        return {}
    if globalns is None:
        globalns = getattr(obj, '__globals__', {})
        if localns is None:
            localns = globalns
    elif localns is None:
        localns = globalns
    # Classes require a special treatment.
    if isinstance(obj, type):
        hints = {}
        # Walk the MRO base-first so subclasses override inherited hints.
        for base in reversed(obj.__mro__):
            ann = base.__dict__.get('__annotations__', {})
            for name, value in ann.items():
                if value is None:
                    # A literal None annotation means NoneType.
                    value = type(None)
                if isinstance(value, str):
                    # String annotations are forward references.
                    value = _ForwardRef(value)
                value = _eval_type(value, globalns, localns)
                hints[name] = value
        return hints
    hints = getattr(obj, '__annotations__', None)
    if hints is None:
        # Return empty annotations for something that _could_ have them.
        if (isinstance(obj, types.FunctionType) or
            isinstance(obj, types.BuiltinFunctionType) or
            isinstance(obj, types.MethodType) or
            isinstance(obj, types.ModuleType)):
            return {}
        else:
            raise TypeError('{!r} is not a module, class, method, '
                            'or function.'.format(obj))
    defaults = _get_defaults(obj)
    hints = dict(hints)
    for name, value in hints.items():
        if value is None:
            value = type(None)
        if isinstance(value, str):
            value = _ForwardRef(value)
        value = _eval_type(value, globalns, localns)
        # A parameter defaulting to None is implicitly Optional.
        if name in defaults and defaults[name] is None:
            value = Optional[value]
        hints[name] = value
    return hints
+
+
def no_type_check(arg):
    """Decorator to indicate that annotations are not type hints.

    The argument must be a class or function; if it is a class, it
    applies recursively to all methods and classes defined in that class
    (but not to methods defined in its superclasses or subclasses).

    This mutates the function(s) or class(es) in place.
    """
    if isinstance(arg, type):
        for member in list(arg.__dict__.values()):
            # Skip class-dict entries that are the base classes themselves.
            if member in arg.__bases__:
                continue
            if isinstance(member, types.FunctionType):
                member.__no_type_check__ = True
            if isinstance(member, type):
                # Recurse into nested classes.
                no_type_check(member)
    try:
        arg.__no_type_check__ = True
    except TypeError:  # built-in classes
        pass
    return arg
+
+
def no_type_check_decorator(decorator):
    """Decorator to give another decorator the @no_type_check effect.

    This wraps the decorator with something that wraps the decorated
    function in @no_type_check.
    """

    @functools.wraps(decorator)
    def wrapped_decorator(*args, **kwds):
        # Apply the wrapped decorator, then mark its result.
        return no_type_check(decorator(*args, **kwds))

    return wrapped_decorator
+
+
+def _overload_dummy(*args, **kwds):
+    """Helper for @overload to raise when called."""
+    raise NotImplementedError(
+        "You should not call an overloaded function. "
+        "A series of @overload-decorated functions "
+        "outside a stub module should always be followed "
+        "by an implementation that is not @overload-ed.")
+
+
def overload(func):
    """Decorator for overloaded functions/methods.

    In a stub file, place two or more stub definitions for the same
    function in a row, each decorated with @overload.  For example:

      @overload
      def utf8(value: None) -> None: ...
      @overload
      def utf8(value: bytes) -> bytes: ...
      @overload
      def utf8(value: str) -> bytes: ...

    In a non-stub file (i.e. a regular .py file), do the same but
    follow it with an implementation.  The implementation should *not*
    be decorated with @overload.  For example:

      @overload
      def utf8(value: None) -> None: ...
      @overload
      def utf8(value: bytes) -> bytes: ...
      @overload
      def utf8(value: str) -> bytes: ...
      def utf8(value):
          # implementation goes here
    """
    # The decorated stub is discarded; calling any overload variant
    # without a real implementation raises via _overload_dummy.
    return _overload_dummy
+
+
class _ProtocolMeta(GenericMeta):
    """Internal metaclass for _Protocol.

    This exists so _Protocol classes can be generic without deriving
    from Generic.
    """

    def __instancecheck__(self, obj):
        if _Protocol in self.__bases__:
            raise TypeError("Protocols cannot be used with isinstance().")
        return super().__instancecheck__(obj)

    def __subclasscheck__(self, cls):
        if not self._is_protocol:
            # No structural checks since this isn't a protocol.
            return NotImplemented

        if self is _Protocol:
            # Every class is a subclass of the empty protocol.
            return True

        # Structural check: cls conforms when every protocol attribute
        # is defined somewhere in its MRO.
        return all(
            any(attr in d.__dict__ for d in cls.__mro__)
            for attr in self._get_protocol_attrs())

    def _get_protocol_attrs(self):
        # Attribute names that are implementation details of this module
        # rather than part of any protocol's interface.
        non_protocol_names = {
            '__abstractmethods__', '__annotations__', '__weakref__',
            '_is_protocol', '__dict__', '__args__', '__slots__',
            '_get_protocol_attrs', '__next_in_mro__', '__parameters__',
            '__origin__', '__orig_bases__', '__extra__',
            '__tree_hash__', '__module__',
        }

        # Get all Protocol base classes.
        protocol_bases = [
            c for c in self.__mro__
            if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol'
        ]

        # Collect attributes included in the protocol.
        attrs = set()
        for base in protocol_bases:
            for attr in base.__dict__:
                if attr.startswith('_abc_') or attr in non_protocol_names:
                    continue
                # Exclude attributes also defined by a non-protocol base.
                defined_elsewhere = any(
                    c is not base and attr in c.__dict__ and
                    not getattr(c, '_is_protocol', False)
                    for c in self.__mro__)
                if not defined_elsewhere:
                    attrs.add(attr)

        return attrs
+
+
class _Protocol(metaclass=_ProtocolMeta):
    """Internal base class for protocol classes.

    This implements a simple-minded structural issubclass check
    (similar but more general than the one-offs in collections.abc
    such as Hashable).
    """

    __slots__ = ()

    # Flag checked by _ProtocolMeta.__subclasscheck__.
    _is_protocol = True
+
+
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.

Hashable = collections_abc.Hashable  # Not generic.
+
+
# Only defined when the running collections.abc provides Awaitable.
if hasattr(collections_abc, 'Awaitable'):
    class Awaitable(Generic[T_co], extra=collections_abc.Awaitable):
        __slots__ = ()

    __all__.append('Awaitable')
+
+
# Only defined when the running collections.abc provides Coroutine.
if hasattr(collections_abc, 'Coroutine'):
    class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co],
                    extra=collections_abc.Coroutine):
        __slots__ = ()

    __all__.append('Coroutine')
+
+
# Only defined when the running collections.abc provides AsyncIterable.
if hasattr(collections_abc, 'AsyncIterable'):

    class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable):
        __slots__ = ()

    class AsyncIterator(AsyncIterable[T_co],
                        extra=collections_abc.AsyncIterator):
        __slots__ = ()

    __all__.append('AsyncIterable')
    __all__.append('AsyncIterator')
+
+
class Iterable(Generic[T_co], extra=collections_abc.Iterable):
    __slots__ = ()
+
+
class Iterator(Iterable[T_co], extra=collections_abc.Iterator):
    __slots__ = ()
+
+
# Protocol: anything defining __int__.
class SupportsInt(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __int__(self) -> int:
        pass
+
+
# Protocol: anything defining __float__.
class SupportsFloat(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __float__(self) -> float:
        pass
+
+
# Protocol: anything defining __complex__.
class SupportsComplex(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __complex__(self) -> complex:
        pass
+
+
# Protocol: anything defining __bytes__.
class SupportsBytes(_Protocol):
    __slots__ = ()

    @abstractmethod
    def __bytes__(self) -> bytes:
        pass
+
+
# Generic protocol: anything defining __abs__ returning T_co.
class SupportsAbs(_Protocol[T_co]):
    __slots__ = ()

    @abstractmethod
    def __abs__(self) -> T_co:
        pass
+
+
# Generic protocol: anything defining __round__ returning T_co.
class SupportsRound(_Protocol[T_co]):
    __slots__ = ()

    @abstractmethod
    def __round__(self, ndigits: int = 0) -> T_co:
        pass
+
+
# Use the real collections.abc.Reversible where available; otherwise
# fall back to a structural protocol requiring __reversed__.
if hasattr(collections_abc, 'Reversible'):
    class Reversible(Iterable[T_co], extra=collections_abc.Reversible):
        __slots__ = ()
else:
    class Reversible(_Protocol[T_co]):
        __slots__ = ()

        @abstractmethod
        def __reversed__(self) -> 'Iterator[T_co]':
            pass
+
+
+Sized = collections_abc.Sized  # Not generic.
+
+
class Container(Generic[T_co], extra=collections_abc.Container):
    __slots__ = ()
+
+
# Only defined when the running collections.abc provides Collection.
if hasattr(collections_abc, 'Collection'):
    class Collection(Sized, Iterable[T_co], Container[T_co],
                     extra=collections_abc.Collection):
        __slots__ = ()

    __all__.append('Collection')
+
+
+# Callable was defined earlier.
+
# Base AbstractSet on Collection when available, otherwise compose it
# from Sized + Iterable + Container.
if hasattr(collections_abc, 'Collection'):
    class AbstractSet(Collection[T_co],
                      extra=collections_abc.Set):
        __slots__ = ()
else:
    class AbstractSet(Sized, Iterable[T_co], Container[T_co],
                      extra=collections_abc.Set):
        __slots__ = ()
+
+
class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet):
    __slots__ = ()
+
+
# NOTE: It is only covariant in the value type.
if hasattr(collections_abc, 'Collection'):
    class Mapping(Collection[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()
else:
    class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co],
                  extra=collections_abc.Mapping):
        __slots__ = ()
+
+
class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
    __slots__ = ()
+
# Pick Sequence's bases from whatever collections.abc provides:
# Reversible+Collection, Reversible composed by hand, or the plain
# Sized/Iterable/Container combination.
if hasattr(collections_abc, 'Reversible'):
    if hasattr(collections_abc, 'Collection'):
        class Sequence(Reversible[T_co], Collection[T_co],
                   extra=collections_abc.Sequence):
            __slots__ = ()
    else:
        class Sequence(Sized, Reversible[T_co], Container[T_co],
                   extra=collections_abc.Sequence):
            __slots__ = ()
else:
    class Sequence(Sized, Iterable[T_co], Container[T_co],
                   extra=collections_abc.Sequence):
        __slots__ = ()
+
+
class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
    __slots__ = ()
+
+
class ByteString(Sequence[int], extra=collections_abc.ByteString):
    __slots__ = ()
+
+
class List(list, MutableSequence[T], extra=list):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # List is annotation-only; direct instantiation is rejected.
        if _geqv(cls, List):
            raise TypeError("Type List cannot be instantiated; "
                            "use list() instead")
        return _generic_new(list, cls, *args, **kwds)
+
+
class Set(set, MutableSet[T], extra=set):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Set is annotation-only; direct instantiation is rejected.
        if _geqv(cls, Set):
            raise TypeError("Type Set cannot be instantiated; "
                            "use set() instead")
        return _generic_new(set, cls, *args, **kwds)
+
+
class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset):
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # FrozenSet is annotation-only; direct instantiation is rejected.
        if _geqv(cls, FrozenSet):
            raise TypeError("Type FrozenSet cannot be instantiated; "
                            "use frozenset() instead")
        return _generic_new(frozenset, cls, *args, **kwds)
+
+
class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
    __slots__ = ()
+
+
class KeysView(MappingView[KT], AbstractSet[KT],
               extra=collections_abc.KeysView):
    __slots__ = ()
+
+
class ItemsView(MappingView[Tuple[KT, VT_co]],
                AbstractSet[Tuple[KT, VT_co]],
                Generic[KT, VT_co],
                extra=collections_abc.ItemsView):
    __slots__ = ()
+
+
class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
    __slots__ = ()
+
+
# Only defined when contextlib provides AbstractContextManager.
if hasattr(contextlib, 'AbstractContextManager'):
    class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager):
        __slots__ = ()
    __all__.append('ContextManager')
+
+
class Dict(dict, MutableMapping[KT, VT], extra=dict):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Dict is annotation-only; direct instantiation is rejected.
        if _geqv(cls, Dict):
            raise TypeError("Type Dict cannot be instantiated; "
                            "use dict() instead")
        return _generic_new(dict, cls, *args, **kwds)
+
# Generic alias of collections.defaultdict; not directly instantiable.
class DefaultDict(collections.defaultdict, MutableMapping[KT, VT],
                  extra=collections.defaultdict):

    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # Reject direct instantiation of the bare generic (see List).
        if _geqv(cls, DefaultDict):
            raise TypeError("Type DefaultDict cannot be instantiated; "
                            "use collections.defaultdict() instead")
        return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
# Determine what base class to use for Generator.
if hasattr(collections_abc, 'Generator'):
    # Sufficiently recent versions of 3.5 have a Generator ABC.
    _G_base = collections_abc.Generator
else:
    # Fall back on the exact type.
    _G_base = types.GeneratorType


# Generic generator type: parameterized by yield, send, and return types.
class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
                extra=_G_base):
    __slots__ = ()

    def __new__(cls, *args, **kwds):
        # As with the container generics, only subclasses are instantiable.
        if _geqv(cls, Generator):
            raise TypeError("Type Generator cannot be instantiated; "
                            "create a subclass instead")
        return _generic_new(_G_base, cls, *args, **kwds)
+
+
# Internal type variable used for Type[].  Covariant and bounded by ``type``
# so that Type[Sub] is acceptable where Type[Base] is expected.
CT_co = TypeVar('CT_co', covariant=True, bound=type)


# This is not a real generic class.  Don't use outside annotations.
class Type(Generic[CT_co], extra=type):
    """A special construct usable to annotate class objects.

    For example, suppose we have the following classes::

      class User: ...  # Abstract base for User classes
      class BasicUser(User): ...
      class ProUser(User): ...
      class TeamUser(User): ...

    And a function that takes a class argument that's a subclass of
    User and returns an instance of the corresponding class::

      U = TypeVar('U', bound=User)
      def new_user(user_class: Type[U]) -> U:
          user = user_class()
          # (Here we could write the user object to a database)
          return user

      joe = new_user(BasicUser)

    At this point the type checker knows that joe has type BasicUser.
    """

    __slots__ = ()
+
+
def _make_nmtuple(name, types):
    """Build a collections.namedtuple class from (field, type) pairs.

    Each declared type is validated via _type_check, and the mapping is
    recorded on the class as ``_field_types``.
    """
    msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type"
    types = [(n, _type_check(t, msg)) for n, t in types]
    nm_tpl = collections.namedtuple(name, [n for n, t in types])
    nm_tpl._field_types = dict(types)
    # Attribute the class to the caller's module (two frames up skips this
    # helper and the NamedTuple machinery) so that pickling can find it.
    try:
        nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return nm_tpl
+
+
# True on Python 3.6+; gates the class-body and keyword NamedTuple syntaxes.
_PY36 = sys.version_info[:2] >= (3, 6)
+
+
class NamedTupleMeta(type):
    """Metaclass that turns ``class X(NamedTuple)`` bodies into namedtuples."""

    def __new__(cls, typename, bases, ns):
        # ``_root`` marks the NamedTuple base class itself, which must be
        # created as an ordinary class rather than converted.
        if ns.get('_root', False):
            return super().__new__(cls, typename, bases, ns)
        if not _PY36:
            raise TypeError("Class syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        # Field names and types come from PEP 526 variable annotations.
        types = ns.get('__annotations__', {})
        return _make_nmtuple(typename, types.items())
+
class NamedTuple(metaclass=NamedTupleMeta):
    """Typed version of namedtuple.

    Usage in Python versions >= 3.6::

        class Employee(NamedTuple):
            name: str
            id: int

    This is equivalent to::

        Employee = collections.namedtuple('Employee', ['name', 'id'])

    The resulting class has one extra attribute: _field_types,
    giving a dict mapping field names to types.  (The field names
    are in the _fields attribute, which is part of the namedtuple
    API.) Alternative equivalent keyword syntax is also accepted::

        Employee = NamedTuple('Employee', name=str, id=int)

    In Python versions <= 3.5 use::

        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
    """
    # Sentinel consumed by NamedTupleMeta so this base class is built
    # normally instead of being converted into a namedtuple.
    _root = True

    def __new__(self, typename, fields=None, **kwargs):
        # Functional form.  A fields list and keyword fields are mutually
        # exclusive; keywords additionally require 3.6+ (ordered **kwargs).
        if kwargs and not _PY36:
            raise TypeError("Keyword syntax for NamedTuple is only supported"
                            " in Python 3.6+")
        if fields is None:
            fields = kwargs.items()
        elif kwargs:
            raise TypeError("Either list of fields or keywords"
                            " can be provided to NamedTuple, not both")
        return _make_nmtuple(typename, fields)
+
+
def NewType(name, tp):
    """Create a distinct type for static checkers at near-zero runtime cost.

    Static type checkers treat ``NewType(name, tp)`` as a subtype of ``tp``;
    at runtime it is just an identity function.  Usage::

        UserId = NewType('UserId', int)

        def name_by_id(user_id: UserId) -> str:
            ...

        UserId('user')          # Fails type check

        name_by_id(42)          # Fails type check
        name_by_id(UserId(42))  # OK

        num = UserId(5) + 1     # type: int
    """

    def new_type(x):
        # Identity at runtime; the distinction exists only for checkers.
        return x

    # Expose the declared name and the wrapped type for introspection.
    new_type.__supertype__ = tp
    new_type.__name__ = name
    return new_type
+
+
# Python-version-specific alias (Python 2: unicode; Python 3: str); lets
# code annotate "text" portably across both major versions.
Text = str


# Constant that's True when type checking, but False here (at runtime).
TYPE_CHECKING = False
+
+
class IO(Generic[AnyStr]):
    """Generic base class for TextIO and BinaryIO.

    This is an abstract, generic version of the return of open().

    NOTE: This does not distinguish between the different possible
    classes (text vs. binary, read vs. write vs. read/write,
    append-only, unbuffered).  The TextIO and BinaryIO subclasses
    below capture the distinctions between text vs. binary, which is
    pervasive in the interface; however we currently do not offer a
    way to track the other distinctions in the type system.
    """

    __slots__ = ()

    # Abstract attributes of the stream.
    @abstractproperty
    def mode(self) -> str:
        pass

    @abstractproperty
    def name(self) -> str:
        pass

    # Abstract stream operations; AnyStr ties the str/bytes element type
    # of read/write methods together per parameterization.
    @abstractmethod
    def close(self) -> None:
        pass

    @abstractmethod
    def closed(self) -> bool:
        pass

    @abstractmethod
    def fileno(self) -> int:
        pass

    @abstractmethod
    def flush(self) -> None:
        pass

    @abstractmethod
    def isatty(self) -> bool:
        pass

    @abstractmethod
    def read(self, n: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readable(self) -> bool:
        pass

    @abstractmethod
    def readline(self, limit: int = -1) -> AnyStr:
        pass

    @abstractmethod
    def readlines(self, hint: int = -1) -> List[AnyStr]:
        pass

    @abstractmethod
    def seek(self, offset: int, whence: int = 0) -> int:
        pass

    @abstractmethod
    def seekable(self) -> bool:
        pass

    @abstractmethod
    def tell(self) -> int:
        pass

    @abstractmethod
    def truncate(self, size: int = None) -> int:
        pass

    @abstractmethod
    def writable(self) -> bool:
        pass

    @abstractmethod
    def write(self, s: AnyStr) -> int:
        pass

    @abstractmethod
    def writelines(self, lines: List[AnyStr]) -> None:
        pass

    # Context-manager protocol: ``with open(...) as f``.
    @abstractmethod
    def __enter__(self) -> 'IO[AnyStr]':
        pass

    @abstractmethod
    def __exit__(self, type, value, traceback) -> None:
        pass
+
+
class BinaryIO(IO[bytes]):
    """Typed version of the return of open() in binary mode."""

    __slots__ = ()

    # write accepts any bytes-like buffer, not just bytes.
    @abstractmethod
    def write(self, s: Union[bytes, bytearray]) -> int:
        pass

    @abstractmethod
    def __enter__(self) -> 'BinaryIO':
        pass
+
+
class TextIO(IO[str]):
    """Typed version of the return of open() in text mode."""

    __slots__ = ()

    # Text-layer attributes mirroring io.TextIOWrapper.
    @abstractproperty
    def buffer(self) -> BinaryIO:
        pass

    @abstractproperty
    def encoding(self) -> str:
        pass

    @abstractproperty
    def errors(self) -> Optional[str]:
        pass

    @abstractproperty
    def line_buffering(self) -> bool:
        pass

    @abstractproperty
    def newlines(self) -> Any:
        pass

    @abstractmethod
    def __enter__(self) -> 'TextIO':
        pass
+
+
class io:
    """Wrapper namespace for IO generic classes."""

    __all__ = ['IO', 'TextIO', 'BinaryIO']
    IO = IO
    TextIO = TextIO
    BinaryIO = BinaryIO

# Register the namespace class as a pseudo-submodule so that
# ``import typing.io`` / ``from typing.io import IO`` work.
io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io
+
+
# Annotation-only aliases for compiled regex objects and match objects;
# the extractor lambdas recover the pattern for error messages.
Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
                     lambda p: p.pattern)
Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
                   lambda m: m.re.pattern)


class re:
    """Wrapper namespace for re type aliases."""

    __all__ = ['Pattern', 'Match']
    Pattern = Pattern
    Match = Match

# Register as a pseudo-submodule so ``from typing.re import Pattern`` works.
re.__name__ = __name__ + '.re'
sys.modules[re.__name__] = re
diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py
new file mode 100644
index 0000000..978af71
--- /dev/null
+++ b/misc/actions_stubs.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+import os
+import shutil
+from typing import Tuple, Any
+try:
+    import click
+except ImportError:
+    print("You need the module \'click\'")
+    exit(1)
+
+base_path = os.getcwd()
+
+# I don't know how to set callables with different args
def apply_all(func: Any, directory: str, extension: str,
            to_extension: str='', exclude: Tuple[str]=('',),
            recursive: bool=True, debug: bool=False) -> None:
    """Apply ``func`` to every file under ``directory`` ending in ``extension``.

    When ``to_extension`` is given, ``func`` is called with (path, new_path)
    where new_path swaps the extension; otherwise with (path,) alone.
    ``exclude`` lists base names (without extension) to skip.  With
    ``recursive`` false only the top-level directory is visited.
    """
    excluded = [stem + extension for stem in exclude] if exclude else []
    root = os.path.join(base_path, directory)
    for current_dir, _subdirs, filenames in os.walk(root):
        for filename in filenames:
            if "{}".format(filename) in excluded:
                continue
            full_path = os.path.join(current_dir, filename)
            if not full_path.endswith(extension):
                continue
            if to_extension:
                # Swap the trailing extension for the target one.
                target = "{}{}".format(full_path[:-len(extension)], to_extension)
                func(full_path, target)
            else:
                func(full_path)
        if not recursive:
            # os.walk is top-down, so breaking after the first directory
            # limits the scan to the root level.
            break
+
def confirm(resp: bool=False, **kargs) -> bool:
    """Ask a yes/no question on stdin and return the answer as a bool.

    ``resp`` is the default returned when the user just presses Enter.
    The keyword args fill the prompt template: act, rec, f1, e1 and,
    optionally, f2/e2 describing the target extension.
    """
    kargs['rest'] = "to this {f2}/*{e2}".format(**kargs) if kargs.get('f2') else ''
    prompt = "{act} all files {rec}matching this expression {f1}/*{e1} {rest}".format(**kargs)
    # BUG FIX: the original had a bare ``prompt.format(**kargs)`` here whose
    # result was discarded -- str.format returns a new string and has no side
    # effects, so the statement was dead code and has been removed.
    prompt = "{} [{}]|{}: ".format(prompt, 'Y' if resp else 'N', 'n' if resp else 'y')
    while True:
        ans = input(prompt).lower()
        if not ans:
            return resp
        if ans not in ('y', 'n'):
            print('Please, enter (y) or (n).')
            continue
        return ans == 'y'
+
actions = ['cp', 'mv', 'rm']

# NOTE: in the transmitted patch each decorator's '@' was mangled to ' at '
# by the email pipeline; the decorators are restored here.
@click.command(context_settings=dict(help_option_names=['-h', '--help']))
@click.option('--action', '-a', type=click.Choice(actions), required=True, help="What do I have to do :-)")
@click.option('--dir', '-d', 'directory', default='stubs', help="Directory to start search!")
@click.option('--ext', '-e', 'extension', default='.py', help="Extension \"from\" will be applied the action. Default .py")
@click.option('--to', '-t', 'to_extension', default='.pyi', help="Extension \"to\" will be applied the action if can. Default .pyi")
@click.option('--exclude', '-x', multiple=True, default=('__init__',), help="For every appear, will ignore this files. (can set multiples times)")
@click.option('--not-recursive', '-n', default=True, is_flag=True, help="Set if don't want to walk recursively.")
def main(action: str, directory: str, extension: str, to_extension: str,
    exclude: Tuple[str], not_recursive: bool) -> None:
    """
    This script helps to copy/move/remove files based on their extension.

    The three actions will ask you for confirmation.

    Examples (by default the script search in stubs directory):

    - Change extension of all stubs from .py to .pyi:

        python <script.py> -a mv

    - Revert the previous action.

        python <script.py> -a mv -e .pyi -t .py

    - If you want to ignore "awesome.py" files.

        python <script.py> -a [cp|mv|rm] -x awesome

    - If you want to ignore "awesome.py" and "__init__.py" files.

        python <script.py> -a [cp|mv|rm] -x awesome -x __init__

    - If you want to remove all ".todo" files in "todo" directory, but not recursively:

        python <script.py> -a rm -e .todo -d todo -n

    """
    # click.Choice already restricts --action; this guard only matters for
    # direct (non-CLI) invocation.
    if action not in actions:
        print("Your action have to be one of this: {}".format(', '.join(actions)))
        return

    # NOTE(review): --not-recursive is declared with default=True, is_flag=True,
    # so the variable is True when the flag is ABSENT (walk recursively) and
    # False when -n is passed -- confirm against click's flag semantics.
    rec = "[Recursively] " if not_recursive else ''
    # Normalize extensions to a leading dot and strip a trailing slash.
    if not extension.startswith('.'):
        extension = ".{}".format(extension)
    if not to_extension.startswith('.'):
        to_extension = ".{}".format(to_extension)
    if directory.endswith('/'):
        directory = directory[:-1]
    if action == 'cp':
        if confirm(act='Copy',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
            apply_all(shutil.copy, directory, extension, to_extension, exclude, not_recursive)
    elif action == 'rm':
        if confirm(act='Remove',rec=rec, f1=directory, e1=extension):
            apply_all(os.remove, directory, extension, exclude=exclude, recursive=not_recursive)
    elif action == 'mv':
        if confirm(act='Move',rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension):
            apply_all(shutil.move, directory, extension, to_extension, exclude, not_recursive)


if __name__ == '__main__':
    main()
diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py
new file mode 100644
index 0000000..643e2bf
--- /dev/null
+++ b/misc/analyze_cache.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+
+from typing import Any, Dict, Generator, Iterable, List, Optional
+from collections import Counter
+
+import os
+import os.path
+import json
+
+ROOT = ".mypy_cache/3.5"
+
+JsonDict = Dict[str, Any]
+
class CacheData:
    """One module's mypy cache entry: the parsed data/meta JSON documents
    plus the on-disk size of each of the two files."""

    def __init__(self, filename: str, data_json: JsonDict, meta_json: JsonDict,
                 data_size: int, meta_size: int) -> None:
        self.filename = filename
        self.data, self.meta = data_json, meta_json
        self.data_size, self.meta_size = data_size, meta_size

    @property
    def total_size(self):
        # Combined footprint of the .data.json and .meta.json files.
        return self.data_size + self.meta_size
+
+
def extract_classes(chunks: Iterable[CacheData]) -> Iterable[JsonDict]:
    """Walk every chunk's JSON tree depth-first and yield each dict node,
    parents before their children."""
    def walk(node: Any) -> Iterable[JsonDict]:
        if isinstance(node, dict):
            yield node
            for value in node.values():
                yield from walk(value)
        elif isinstance(node, list):
            for item in node:
                yield from walk(item)
    for chunk in chunks:
        yield from walk(chunk.data)
+
+
def load_json(data_path: str, meta_path: str) -> CacheData:
    """Parse one .data.json/.meta.json pair into a CacheData record."""
    with open(data_path, 'r') as ds:
        data_json = json.load(ds)

    with open(meta_path, 'r') as ms:
        meta_json = json.load(ms)

    # A single display name covering both cache files of the module.
    combined_name = data_path.replace(".data.json", ".*.json")
    return CacheData(combined_name, data_json, meta_json,
                     os.path.getsize(data_path), os.path.getsize(meta_path))
+
+
def get_files(root: str) -> Iterable[CacheData]:
    """Yield a CacheData for every .data.json/.meta.json pair under root."""
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            if not name.endswith(".data.json"):
                continue
            meta_name = name.replace(".data.json", ".meta.json")
            yield load_json(os.path.join(dirpath, name),
                            os.path.join(dirpath, meta_name))
+
+
def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
    """Lazily select the serialized chunks whose '.class' tag equals name."""
    for chunk in chunks:
        if chunk['.class'] == name:
            yield chunk
+
+
def report_counter(counter: Counter, amount: Optional[int] = None) -> None:
    """Print the ``amount`` most common entries (all when None), count first,
    followed by a blank separator line."""
    for entry, frequency in counter.most_common(amount):
        line = '    {: <8} {}'.format(frequency, entry)
        print(line)
    print()
+
+
def report_most_common(chunks: List[JsonDict], amount: Optional[int] = None) -> None:
    """Stringify every chunk and report the most frequent representations."""
    frequencies = Counter(str(chunk) for chunk in chunks)
    report_counter(frequencies, amount)
+
+
def compress(chunk: JsonDict) -> JsonDict:
    """Deduplicate repeated sub-dicts of ``chunk`` (mutates it in place).

    The first occurrence of each dict is tagged with a '.cache_id'; later
    occurrences with the same string form are replaced by a tiny
    ``{'.id': n}`` stub that decompress() resolves back.
    NOTE(review): keying on hash(str(chunk)) means a hash collision would
    alias two distinct chunks -- presumably acceptable for this one-off
    analysis script.
    """
    cache = {}  # type: Dict[int, JsonDict]
    counter = 0
    def helper(chunk: Any) -> Any:
        nonlocal counter
        if not isinstance(chunk, dict):
            return chunk

        # Tiny dicts are cheaper to keep inline than to deduplicate.
        if len(chunk) <= 2:
            return chunk
        id = hash(str(chunk))

        if id in cache:
            return cache[id]
        else:
            cache[id] = {'.id': counter}
            chunk['.cache_id'] = counter
            counter += 1

        # Recurse into values (sorted for deterministic numbering).
        for name in sorted(chunk.keys()):
            value = chunk[name]
            if isinstance(value, list):
                chunk[name] = [helper(child) for child in value]
            elif isinstance(value, dict):
                chunk[name] = helper(value)

        return chunk
    out = helper(chunk)
    return out
+
def decompress(chunk: JsonDict) -> JsonDict:
    """Inverse of compress(): resolve ``{'.id': n}`` stubs back to the dict
    carrying the matching '.cache_id' tag (mutates ``chunk`` in place)."""
    cache = {}  # type: Dict[int, JsonDict]
    def helper(chunk: Any) -> Any:
        if not isinstance(chunk, dict):
            return chunk
        # A stub: replace with the previously-registered full dict.
        if '.id' in chunk:
            return cache[chunk['.id']]

        counter = None
        if '.cache_id' in chunk:
            counter = chunk['.cache_id']
            del chunk['.cache_id']

        for name in sorted(chunk.keys()):
            value = chunk[name]
            if isinstance(value, list):
                chunk[name] = [helper(child) for child in value]
            elif isinstance(value, dict):
                chunk[name] = helper(value)

        # Register only after children are resolved so stubs see the
        # fully reconstructed dict.
        if counter is not None:
            cache[counter] = chunk

        return chunk
    return helper(chunk)
+
+
+
+
def main() -> None:
    """Report mypy cache statistics and test the compress/decompress round trip."""
    json_chunks = list(get_files(ROOT))
    class_chunks = list(extract_classes(json_chunks))

    total_size = sum(chunk.total_size for chunk in json_chunks)
    print("Total cache size: {:.3f} megabytes".format(total_size / (1024 * 1024)))
    print()

    class_name_counter = Counter(chunk[".class"] for chunk in class_chunks)
    print("Most commonly used classes:")
    report_counter(class_name_counter)

    print("Most common literal chunks:")
    report_most_common(class_chunks, 15)

    # Find the top-level build cache entry to use as the compression sample.
    build = None
    for chunk in json_chunks:
        if 'build.*.json' in chunk.filename:
            build = chunk
            break
    # NOTE(review): assumes a build cache entry exists; build stays None
    # otherwise and the next line would raise AttributeError.
    original = json.dumps(build.data, sort_keys=True)
    print("Size of build.data.json, in kilobytes: {:.3f}".format(len(original) / 1024))

    build.data = compress(build.data)
    compressed = json.dumps(build.data, sort_keys=True)
    print("Size of compressed build.data.json, in kilobytes: {:.3f}".format(len(compressed) / 1024))

    build.data = decompress(build.data)
    decompressed = json.dumps(build.data, sort_keys=True)
    print("Size of decompressed build.data.json, in kilobytes: {:.3f}".format(len(decompressed) / 1024))

    print("Lossless conversion back", original == decompressed)


    '''var_chunks = list(pluck("Var", class_chunks))
    report_most_common(var_chunks, 20)
    print()

    #for var in var_chunks:
    #    if var['fullname'] == 'self' and not (isinstance(var['type'], dict) and var['type']['.class'] == 'AnyType'):
    #        print(var)
    #argument_chunks = list(pluck("Argument", class_chunks))

    symbol_table_node_chunks = list(pluck("SymbolTableNode", class_chunks))
    report_most_common(symbol_table_node_chunks, 20)

    print()
    print("Most common")
    report_most_common(class_chunks, 20)
    print()'''


if __name__ == '__main__':
    main()
diff --git a/misc/async_matrix.py b/misc/async_matrix.py
new file mode 100644
index 0000000..e9a758a
--- /dev/null
+++ b/misc/async_matrix.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+"""Test various combinations of generators/coroutines.
+
+This was used to cross-check the errors in the test case
+testFullCoroutineMatrix in test-data/unit/check-async-await.test.
+"""
+
+import sys
+from types import coroutine
+from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
+
+# The various things you might try to use in `await` or `yield from`.
+
def plain_generator() -> Generator[str, None, int]:
    # Ordinary generator fixture: yields one 'a', generator return value 1.
    yield 'a'
    return 1
+
async def plain_coroutine() -> int:
    # Native (async def) coroutine fixture returning 1.
    return 1
+
# types.coroutine marks this generator as a generator-based coroutine,
# making it usable with ``await``.
@coroutine
def decorated_generator() -> Generator[str, None, int]:
    yield 'a'
    return 1
+
# Applying types.coroutine to a native coroutine function; included to
# cover that cell of the test matrix.
@coroutine
async def decorated_coroutine() -> int:
    return 1
+
class It(Iterator[str]):
    """One-shot iterator fixture: produces a single 'a', then raises
    StopIteration('end')."""

    stop = False  # class-level default; set True on the instance after the first item

    def __iter__(self) -> 'It':
        return self

    def __next__(self) -> str:
        # Guard-clause form of the original if/else.
        if not self.stop:
            self.stop = True
            return 'a'
        raise StopIteration('end')
+
def other_iterator() -> It:
    # Factory for the plain-iterator fixture (not a generator/coroutine).
    return It()
+
class Aw(Awaitable[int]):
    # Custom awaitable implemented via __await__: yields one 'a', result is 1.
    def __await__(self) -> Generator[str, Any, int]:
        yield 'a'
        return 1
+
def other_coroutine() -> Aw:
    # Factory for the custom-awaitable fixture.
    return Aw()
+
+# The various contexts in which `await` or `yield from` might occur.
+
def plain_host_generator(func) -> Generator[str, None, None]:
    """Plain-generator host: yields 'a', then delegates to func() with
    ``yield from``, closing the delegate on exit when it supports close()."""
    yield 'a'
    result = 0
    delegate = func()
    try:
        result = yield from delegate
    finally:
        # Plain iterators have no close(); swallow only that case.
        try:
            delegate.close()
        except AttributeError:
            pass
+
async def plain_host_coroutine(func) -> None:
    # Native-coroutine host: awaits whatever func() returns.
    x = 0
    x = await func()
+
# Same delegation shape as plain_host_generator, but wrapped with
# types.coroutine so it is itself awaitable.
@coroutine
def decorated_host_generator(func) -> Generator[str, None, None]:
    yield 'a'
    x = 0
    f = func()
    try:
        x = yield from f
    finally:
        # Plain iterators have no close(); ignore only that case.
        try:
            f.close()
        except AttributeError:
            pass
+
# Native-coroutine host additionally wrapped with types.coroutine.
@coroutine
async def decorated_host_coroutine(func) -> None:
    x = 0
    x = await func()
+
+# Main driver.
+
def main():
    """Drive every host/func combination and print what happens.

    Each pairing is stepped manually with send(None) so both yields and
    the final StopIteration value can be observed; pass -v for details.
    """
    verbose = ('-v' in sys.argv)
    for host in [plain_host_generator, plain_host_coroutine,
                 decorated_host_generator, decorated_host_coroutine]:
        print()
        print("==== Host:", host.__name__)
        for func in [plain_generator, plain_coroutine,
                     decorated_generator, decorated_coroutine,
                     other_iterator, other_coroutine]:
            print("  ---- Func:", func.__name__)
            try:
                f = host(func)
                # Ten steps is more than any fixture needs; the for/else
                # reports the pathological "never stopped" case.
                for i in range(10):
                    try:
                        x = f.send(None)
                        if verbose:
                            print("    yield:", x)
                    except StopIteration as e:
                        if verbose:
                            print("    stop:", e.value)
                        break
                else:
                    if verbose:
                        print("    ???? still going")
            except Exception as e:
                # Invalid combinations are expected to raise; record them.
                print("    error:", repr(e))

# Run main().

if __name__ == '__main__':
    main()
diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py
new file mode 100644
index 0000000..0b552bf
--- /dev/null
+++ b/misc/fix_annotate.py
@@ -0,0 +1,219 @@
+"""Fixer for lib2to3 that inserts mypy annotations into all methods.
+
+The simplest way to run this is to copy it into lib2to3's "fixes"
+subdirectory and then run "2to3 -f annotate" over your files.
+
+The fixer transforms e.g.
+
+  def foo(self, bar, baz=12):
+      return bar + baz
+
+into
+
+  def foo(self, bar, baz=12):
+      # type: (Any, int) -> Any
+      return bar + baz
+
+It does not do type inference but it recognizes some basic default
+argument values such as numbers and strings (and assumes their type
+implies the argument type).
+
+It also uses some basic heuristics to decide whether to ignore the
+first argument:
+
+  - always if it's named 'self'
+  - if there's a @classmethod decorator
+
+Finally, it knows that __init__() is supposed to return None.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+
+from lib2to3.fixer_base import BaseFix
+from lib2to3.patcomp import compile_pattern
+from lib2to3.pytree import Leaf, Node
+from lib2to3.fixer_util import token, syms, touch_import
+
+
class FixAnnotate(BaseFix):
    """lib2to3 fixer that inserts a ``# type:`` comment into each function.

    Argument types are guessed from simple default values; everything else
    becomes Any (with __init__ and no-return functions annotated -> None).
    """

    # This fixer is compatible with the bottom matcher.
    BM_compatible = True

    # This fixer shouldn't run by default.
    explicit = True

    # The pattern to match.
    PATTERN = """
              funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ >
              """

    # Optional cap on the number of functions annotated, via $MAXFIXES.
    counter = None if not os.getenv('MAXFIXES') else int(os.getenv('MAXFIXES'))

    def transform(self, node, results):
        """Insert a '# type:' comment into the matched funcdef's suite."""
        if FixAnnotate.counter is not None:
            if FixAnnotate.counter <= 0:
                return
        suite = results['suite']
        children = suite[0].children

        # NOTE: I've reverse-engineered the structure of the parse tree.
        # It's always a list of nodes, the first of which contains the
        # entire suite.  Its children seem to be:
        #
        #   [0] NEWLINE
        #   [1] INDENT
        #   [2...n-2] statements (the first may be a docstring)
        #   [n-1] DEDENT
        #
        # Comments before the suite are part of the INDENT's prefix.
        #
        # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
        # have a different structure that isn't matched by PATTERN.

        ## print('-'*60)
        ## print(node)
        ## for i, ch in enumerate(children):
        ##     print(i, repr(ch.prefix), repr(ch))

        # Check if there's already an annotation.
        for ch in children:
            if ch.prefix.lstrip().startswith('# type:'):
                return  # There's already a # type: comment here; don't change anything.

        # Compute the annotation
        annot = self.make_annotation(node, results)

        # Insert '# type: {annot}' comment.
        # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
        if len(children) >= 2 and children[1].type == token.INDENT:
            children[1].prefix = '%s# type: %s\n%s' % (children[1].value, annot, children[1].prefix)
            children[1].changed()
            if FixAnnotate.counter is not None:
                FixAnnotate.counter -= 1

        # Also add 'from typing import Any' at the top.
        if 'Any' in annot:
            touch_import('typing', 'Any', node)

    def make_annotation(self, node, results):
        """Build the '(argtypes) -> restype' text for the matched funcdef."""
        name = results['name']
        assert isinstance(name, Leaf), repr(name)
        assert name.type == token.NAME, repr(name)
        decorators = self.get_decorators(node)
        is_method = self.is_method(node)
        # __init__ and functions without 'return expr' are annotated None.
        if name.value == '__init__' or not self.has_return_exprs(node):
            restype = 'None'
        else:
            restype = 'Any'
        args = results.get('args')
        argtypes = []
        if isinstance(args, Node):
            children = args.children
        elif isinstance(args, Leaf):
            children = [args]
        else:
            children = []
        # Interpret children according to the following grammar:
        # (('*'|'**')? NAME ['=' expr] ','?)*
        stars = inferred_type = ''
        in_default = False
        at_start = True
        for child in children:
            if isinstance(child, Leaf):
                if child.value in ('*', '**'):
                    stars += child.value
                elif child.type == token.NAME and not in_default:
                    if not is_method or not at_start or 'staticmethod' in decorators:
                        inferred_type = 'Any'
                    else:
                        # Always skip the first argument if it's named 'self'.
                        # Always skip the first argument of a class method.
                        if  child.value == 'self' or 'classmethod' in decorators:
                            pass
                        else:
                            inferred_type = 'Any'
                elif child.value == '=':
                    in_default = True
                elif in_default and child.value != ',':
                    # Infer the type from simple default-value literals.
                    if child.type == token.NUMBER:
                        if re.match(r'\d+[lL]?$', child.value):
                            inferred_type = 'int'
                        else:
                            inferred_type = 'float'  # TODO: complex?
                    elif child.type == token.STRING:
                        if child.value.startswith(('u', 'U')):
                            inferred_type = 'unicode'
                        else:
                            inferred_type = 'str'
                    elif child.type == token.NAME and child.value in ('True', 'False'):
                        inferred_type = 'bool'
                elif child.value == ',':
                    if inferred_type:
                        argtypes.append(stars + inferred_type)
                    # Reset
                    stars = inferred_type = ''
                    in_default = False
                    at_start = False
        if inferred_type:
            argtypes.append(stars + inferred_type)
        return '(' + ', '.join(argtypes) + ') -> ' + restype

    # The parse tree has a different shape when there is a single
    # decorator vs. when there are multiple decorators.
    DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >"
    decorated = compile_pattern(DECORATED)

    def get_decorators(self, node):
        """Return a list of decorators found on a function definition.

        This is a list of strings; only simple decorators
        (e.g. @staticmethod) are returned.

        If the function is undecorated or only non-simple decorators
        are found, return [].
        """
        if node.parent is None:
            return []
        results = {}
        if not self.decorated.match(node.parent, results):
            return []
        decorators = results.get('dd') or [results['d']]
        decs = []
        for d in decorators:
            for child in d.children:
                if isinstance(child, Leaf) and child.type == token.NAME:
                    decs.append(child.value)
        return decs

    def is_method(self, node):
        """Return whether the node occurs (directly) inside a class."""
        node = node.parent
        while node is not None:
            if node.type == syms.classdef:
                return True
            if node.type == syms.funcdef:
                return False
            node = node.parent
        return False

    RETURN_EXPR = "return_stmt< 'return' any >"
    return_expr = compile_pattern(RETURN_EXPR)

    def has_return_exprs(self, node):
        """Traverse the tree below node looking for 'return expr'.

        Return True if at least 'return expr' is found, False if not.
        (If both 'return' and 'return expr' are found, return True.)
        """
        results = {}
        if self.return_expr.match(node, results):
            return True
        for child in node.children:
            # Don't descend into nested function/class scopes.
            if child.type not in (syms.funcdef, syms.classdef):
                if self.has_return_exprs(child):
                    return True
        return False
diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py
new file mode 100755
index 0000000..515e662
--- /dev/null
+++ b/misc/incremental_checker.py
@@ -0,0 +1,356 @@
+#!/usr/bin/env python3
+"""
+This file compares the output and runtime of running normal vs incremental mode
+on the history of any arbitrary git repo as a way of performing a sanity check
+to make sure incremental mode is working correctly and efficiently.
+
+It does so by first running mypy without incremental mode on the specified range
+of commits to find the expected result, then rewinds back to the first commit and
+re-runs mypy on the commits with incremental mode enabled to make sure it returns
+the same results.
+
+This script will download and test the official mypy repo by default. Running:
+
+    python3 misc/incremental_checker.py last 30
+
+is equivalent to running
+
+    python3 misc/incremental_checker.py last 30 \\
+            --repo_url https://github.com/python/mypy.git \\
+            --file-path mypy
+
+You can choose to run this script against a specific commit id or against the
+last n commits.
+
+To run this script against the last 30 commits:
+
+    python3 misc/incremental_checker.py last 30
+
+To run this script starting from the commit id 2a432b:
+
+    python3 misc/incremental_checker.py commit 2a432b
+"""
+
+from typing import Any, Dict, List, Optional, Tuple
+
+from argparse import (ArgumentParser, RawDescriptionHelpFormatter,
+                      ArgumentDefaultsHelpFormatter, Namespace)
+import base64
+import json
+import os
+import random
+import shutil
+import subprocess
+import sys
+import textwrap
+import time
+
+
+CACHE_PATH = ".incremental_checker_cache.json"
+MYPY_REPO_URL = "https://github.com/python/mypy.git"
+MYPY_TARGET_FILE = "mypy"
+
+JsonDict = Dict[str, Any]
+
+
def print_offset(text: str, indent_length: int = 4) -> None:
    """Print *text* shifted right by *indent_length* spaces, framed by blank lines."""
    pad = ' ' * indent_length
    print('\n' + textwrap.indent(text, pad) + '\n')
+
def delete_folder(folder_path: str) -> None:
    """Recursively remove *folder_path*; a missing path is a silent no-op."""
    if not os.path.exists(folder_path):
        return
    shutil.rmtree(folder_path)
+
+
def execute(command: List[str], fail_on_error: bool = True) -> Tuple[str, str, int]:
    """Run *command* through the shell and return (stdout, stderr, returncode).

    NOTE(review): the argv list is joined with spaces and run with
    shell=True, so arguments containing spaces or shell metacharacters are
    not escaped -- fine for this dev script's controlled inputs.

    With `fail_on_error`, a non-zero exit dumps diagnostics and raises.
    """
    shell_line = ' '.join(command)
    proc = subprocess.Popen(shell_line,
                            shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    raw_out, raw_err = proc.communicate()  # type: Tuple[bytes, bytes]
    stdout = raw_out.decode('utf-8')
    stderr = raw_err.decode('utf-8')
    if fail_on_error and proc.returncode != 0:
        print('EXECUTED COMMAND:', repr(command))
        print('RETURN CODE:', proc.returncode)
        print()
        print('STDOUT:')
        print_offset(stdout)
        print('STDERR:')
        print_offset(stderr)
        raise RuntimeError('Unexpected error from external tool.')
    return stdout, stderr, proc.returncode
+
+
def ensure_environment_is_ready(mypy_path: str, temp_repo_path: str, mypy_cache_path: str) -> None:
    """Prepare a clean state: cd into the mypy checkout and drop stale data.

    Both the leftover cloned repo and mypy's incremental cache are removed
    so every run starts from scratch.  The chdir comes first -- presumably so
    later relative operations resolve against the mypy checkout (TODO confirm;
    main() passes absolute paths).
    """
    os.chdir(mypy_path)
    delete_folder(temp_repo_path)
    delete_folder(mypy_cache_path)
+
+
def initialize_repo(repo_url: str, temp_repo_path: str, branch: Optional[str]) -> None:
    """Clone *repo_url* into *temp_repo_path*, optionally checking out *branch*.

    Fix: `branch` was annotated `str` but is compared to None and the caller
    passes args.branch, which defaults to None -- it is really Optional[str].
    When None, the clone's default branch is kept.
    """
    print("Cloning repo {0} to {1}".format(repo_url, temp_repo_path))
    execute(["git", "clone", repo_url, temp_repo_path])
    if branch is not None:
        print("Checking out branch {}".format(branch))
        execute(["git", "-C", temp_repo_path, "checkout", branch])
+
+
def get_commits(repo_folder_path: str, commit_range: str) -> List[Tuple[str, str]]:
    """Return (commit_id, message) pairs for `git log <commit_range>`, oldest first."""
    log_output, _stderr, _errcode = execute([
        "git", "-C", repo_folder_path, "log", "--reverse", "--oneline", commit_range])

    def split_line(line: str) -> Tuple[str, str]:
        # '--oneline' prints '<short-hash> <subject>'; partition tolerates a
        # subject-less line (message becomes '').
        commit_id, _, message = line.partition(' ')
        return commit_id, message

    return [split_line(line) for line in log_output.strip().split('\n')]
+
+
def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str]]:
    """Return every commit from *start_commit* (inclusive) up to HEAD."""
    print("Fetching commits starting at {0}".format(start_commit))
    commit_range = '{0}^..HEAD'.format(start_commit)
    return get_commits(repo_folder_path, commit_range)
+
+
def get_nth_commit(repo_folder_path: str, n: int) -> Tuple[str, str]:
    """Return the (commit_id, message) of the commit *n* commits before HEAD.

    `git log -<n>` yields the last n commits; index 0 of the oldest-first
    list is the earliest of those (or the root commit, if the history is
    shorter than n).

    Fix: added the missing `repo_folder_path: str` annotation for
    consistency with this file's otherwise fully annotated functions.
    """
    print("Fetching last {} commits (or all, if there are fewer commits than n)".format(n))
    return get_commits(repo_folder_path, '-{}'.format(n))[0]
+
+
def run_mypy(target_file_path: Optional[str],
             mypy_cache_path: str,
             mypy_script: Optional[str],
             incremental: bool = True,
             verbose: bool = False) -> Tuple[float, str]:
    """Type-check `target_file_path` with mypy and return (runtime, output).

    Output is whatever mypy wrote to stdout -- unless anything appeared on
    stderr, in which case stderr is returned instead.  With `incremental`,
    cache data is stored in and retrieved from `mypy_cache_path`; `verbose`
    passes "-v -v" so mypy emits debugging information.
    """
    # Use a custom script when given, else invoke the installed module.
    command = [mypy_script] if mypy_script is not None else ["python3", "-m", "mypy"]
    command += ["--cache-dir", mypy_cache_path]
    if incremental:
        command += ["--incremental"]
    if verbose:
        command += ["-v", "-v"]
    if target_file_path is not None:
        command += [target_file_path]
    start = time.time()
    stdout, stderr, _returncode = execute(command, False)
    output = stderr if stderr != "" else stdout
    runtime = time.time() - start
    return runtime, output
+
+
def load_cache(incremental_cache_path: str = CACHE_PATH) -> JsonDict:
    """Deserialize the expected-results cache; an absent file yields {}."""
    if not os.path.exists(incremental_cache_path):
        return {}
    with open(incremental_cache_path, 'r') as stream:
        return json.load(stream)
+
+
def save_cache(cache: JsonDict, incremental_cache_path: str = CACHE_PATH) -> None:
    """Persist *cache* to disk as pretty-printed JSON."""
    with open(incremental_cache_path, 'w') as stream:
        stream.write(json.dumps(cache, indent=2))
+
+
def set_expected(commits: List[Tuple[str, str]],
                 cache: JsonDict,
                 temp_repo_path: str,
                 target_file_path: Optional[str],
                 mypy_cache_path: str,
                 mypy_script: Optional[str]) -> None:
    """Populates the given `cache` with the expected results for all of the given `commits`.

    This function runs mypy on the `target_file_path` inside the `temp_repo_path`, and stores
    the result in the `cache`.

    If `cache` already contains results for a particular commit, this function will
    skip evaluating that commit and move on to the next."""
    for commit_id, message in commits:
        if commit_id in cache:
            print('Skipping commit (already cached): {0}: "{1}"'.format(commit_id, message))
        else:
            print('Caching expected output for commit {0}: "{1}"'.format(commit_id, message))
            # Check out the commit, then run mypy with incremental=False so the
            # recorded "ground truth" output cannot be affected by cache state.
            execute(["git", "-C", temp_repo_path, "checkout", commit_id])
            runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
                                       incremental=False)
            # Store both the runtime (for speed comparison later) and the output.
            cache[commit_id] = {'runtime': runtime, 'output': output}
            if output == "":
                print("    Clean output ({:.3f} sec)".format(runtime))
            else:
                print("    Output ({:.3f} sec)".format(runtime))
                print_offset(output, 8)
    print()
+
+
def test_incremental(commits: List[Tuple[str, str]],
                     cache: JsonDict,
                     temp_repo_path: str,
                     target_file_path: Optional[str],
                     mypy_cache_path: str,
                     mypy_script: Optional[str]) -> None:
    """Runs incremental mode on all `commits` to verify the output matches the expected output.

    This function runs mypy on the `target_file_path` inside the `temp_repo_path`. The
    expected output must be stored inside of the given `cache`.

    NOTE: a mismatch is only printed, not raised -- the run continues so all
    commits get reported.
    """
    print("Note: first commit is evaluated twice to warm up cache")
    # Duplicate the first commit so the first *measured* incremental run
    # starts from a populated cache rather than a cold one.
    commits = [commits[0]] + commits
    for commit_id, message in commits:
        print('Now testing commit {0}: "{1}"'.format(commit_id, message))
        execute(["git", "-C", temp_repo_path, "checkout", commit_id])
        runtime, output = run_mypy(target_file_path, mypy_cache_path, mypy_script,
                                   incremental=True)
        expected_runtime = cache[commit_id]['runtime']  # type: float
        expected_output = cache[commit_id]['output']  # type: str
        if output != expected_output:
            print("    Output does not match expected result!")
            print("    Expected output ({:.3f} sec):".format(expected_runtime))
            print_offset(expected_output, 8)
            print("    Actual output: ({:.3f} sec):".format(runtime))
            print_offset(output, 8)
        else:
            print("    Output matches expected result!")
            print("    Incremental: {:.3f} sec".format(runtime))
            print("    Original:    {:.3f} sec".format(expected_runtime))
+
+
def cleanup(temp_repo_path: str, mypy_cache_path: str) -> None:
    """Remove the temporary clone and mypy's incremental cache directory."""
    for folder in (temp_repo_path, mypy_cache_path):
        delete_folder(folder)
+
+
def test_repo(target_repo_url: str, temp_repo_path: str,
              target_file_path: Optional[str],
              mypy_path: str, incremental_cache_path: str, mypy_cache_path: str,
              range_type: str, range_start: str, branch: Optional[str],
              params: Optional[Namespace] = None) -> None:
    """Tests incremental mode against the repo specified in `target_repo_url`.

    This algorithm runs in five main stages:

    1.  Clones `target_repo_url` into the `temp_repo_path` folder locally,
        checking out the specified `branch` if applicable.
    2.  Examines the repo's history to get the list of all commits to
        test incremental mode on.
    3.  Runs mypy WITHOUT incremental mode against the `target_file_path` (which is
        assumed to be located inside the `temp_repo_path`), testing each commit
        discovered in stage two.
        -   If the results of running mypy WITHOUT incremental mode on a
            particular commit are already cached inside the `incremental_cache_path`,
            skip that commit to save time.
        -   Cache the results after finishing.
    4.  Rewind back to the first commit, and run mypy WITH incremental mode
        against the `target_file_path` commit-by-commit, and compare to the expected
        results found in stage 3.
    5.  Delete all unnecessary temp files.
    """
    # Stage 1: Clone repo and get ready to begin testing
    ensure_environment_is_ready(mypy_path, temp_repo_path, mypy_cache_path)
    initialize_repo(target_repo_url, temp_repo_path, branch)

    # Stage 2: Get all commits we want to test
    if range_type == "last":
        start_commit = get_nth_commit(temp_repo_path, int(range_start))[0]
    elif range_type == "commit":
        start_commit = range_start
    else:
        raise RuntimeError("Invalid option: {}".format(range_type))
    commits = get_commits_starting_at(temp_repo_path, start_commit)
    if params is not None and params.sample:
        # A fresh random seed is generated (and echoed) unless one was given,
        # so a sampled run can be reproduced exactly.
        seed = params.seed or base64.urlsafe_b64encode(os.urandom(15)).decode('ascii')
        random.seed(seed)
        commits = random.sample(commits, params.sample)
        print("Sampled down to %d commits using random seed %s" % (len(commits), seed))

    # Bug fix: `params` defaults to None, but `params.mypy_script` used to be
    # read unconditionally below, raising AttributeError when params was omitted.
    mypy_script = params.mypy_script if params is not None else None

    # Stage 3: Find and cache expected results for each commit (without incremental mode)
    cache = load_cache(incremental_cache_path)
    set_expected(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
                 mypy_script=mypy_script)
    save_cache(cache, incremental_cache_path)

    # Stage 4: Rewind and re-run mypy (with incremental mode enabled)
    test_incremental(commits, cache, temp_repo_path, target_file_path, mypy_cache_path,
                     mypy_script=mypy_script)

    # Stage 5: Remove temp files
    cleanup(temp_repo_path, mypy_cache_path)
+
+
def main() -> None:
    """Parse command-line arguments and kick off the incremental checker."""
    # Keep the long module docstring readable in --help output.
    help_factory = (lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32))
    parser = ArgumentParser(
        prog='incremental_checker',
        description=__doc__,
        formatter_class=help_factory)

    parser.add_argument("range_type", metavar="START_TYPE", choices=["last", "commit"],
                        help="must be one of 'last' or 'commit'")
    parser.add_argument("range_start", metavar="COMMIT_ID_OR_NUMBER",
                        help="the commit id to start from, or the number of "
                        "commits to move back (see above)")
    parser.add_argument("-r", "--repo_url", default=MYPY_REPO_URL, metavar="URL",
                        help="the repo to clone and run tests on")
    parser.add_argument("-f", "--file-path", default=MYPY_TARGET_FILE, metavar="FILE",
                        help="the name of the file or directory to typecheck")
    parser.add_argument("--cache-path", default=CACHE_PATH, metavar="DIR",
                        help="sets a custom location to store cache data")
    # Bug fix: the two implicitly-concatenated help strings rendered as
    # "custom branchuses the default"; a separator has been added.
    parser.add_argument("--branch", default=None, metavar="NAME",
                        help="check out and test a custom branch; "
                        "uses the default if not specified")
    parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE")
    parser.add_argument("--seed", type=str, help="random seed")
    parser.add_argument("--mypy-script", type=str, help="alternate mypy script to run")

    # With no arguments at all, show usage instead of an argparse error.
    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    params = parser.parse_args(sys.argv[1:])

    # Make all paths absolute so we avoid having to worry about being in the right folder

    # The path to this specific script (incremental_checker.py).
    script_path = os.path.abspath(sys.argv[0])

    # The path to the mypy repo.
    mypy_path = os.path.abspath(os.path.dirname(os.path.dirname(script_path)))

    # The folder the cloned repo will reside in.
    temp_repo_path = os.path.abspath(os.path.join(mypy_path, "tmp_repo"))

    # The particular file or package to typecheck inside the repo.
    if params.file_path:
        target_file_path = os.path.abspath(os.path.join(temp_repo_path, params.file_path))
    else:
        # Allow `-f ''` to clear target_file_path.
        target_file_path = None

    # The path to where the incremental checker cache data is stored.
    incremental_cache_path = os.path.abspath(params.cache_path)

    # The path to store the mypy incremental mode cache data
    mypy_cache_path = os.path.abspath(os.path.join(mypy_path, "misc", ".mypy_cache"))

    print("Assuming mypy is located at {0}".format(mypy_path))
    print("Temp repo will be cloned at {0}".format(temp_repo_path))
    print("Testing file/dir located at {0}".format(target_file_path))
    print("Using cache data located at {0}".format(incremental_cache_path))
    print()

    test_repo(params.repo_url, temp_repo_path, target_file_path,
              mypy_path, incremental_cache_path, mypy_cache_path,
              params.range_type, params.range_start, params.branch,
              params)


if __name__ == '__main__':
    main()
diff --git a/misc/macs.el b/misc/macs.el
new file mode 100644
index 0000000..67d80aa
--- /dev/null
+++ b/misc/macs.el
@@ -0,0 +1,22 @@
; Example Emacs integration; shows type of expression in region.

(defun mypy-show-region ()
  "Show type of variable at point."
  ;; Converts the active region's bounds into (line, column) pairs and
  ;; shells out to mypy's find_type.py helper, which re-runs mypy and
  ;; reports the inferred type of the selected expression.
  ;; NOTE(review): the checkout path (~/src/mypy) and the trailing mypy
  ;; invocation are hard-coded; adjust them for your own setup.
  (interactive)
  (let ((here (region-beginning))
        (there (region-end))
        (filename (buffer-file-name)))
    (let ((hereline (line-number-at-pos here))
          (herecol (save-excursion (goto-char here) (current-column)))
          (thereline (line-number-at-pos there))
          (therecol (save-excursion (goto-char there) (current-column))))
      (shell-command
       (format "cd ~/src/mypy; python3 ./scripts/find_type.py %s %s %s %s %s python3 -m mypy -i mypy"
               filename hereline herecol thereline therecol)
       )
      )
    )
  )

; I like to bind this to ^X-t.
(global-set-key "\C-xt" 'mypy-show-region)
diff --git a/misc/perf_checker.py b/misc/perf_checker.py
new file mode 100644
index 0000000..e55f8cc
--- /dev/null
+++ b/misc/perf_checker.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+
+from typing import Callable, List, Tuple
+
+import os
+import shutil
+import statistics
+import subprocess
+import textwrap
+import time
+
+
class Command:
    """Pairs a timed benchmark body (`command`) with a per-trial `setup` callback."""

    def __init__(self, setup: Callable[[], None], command: Callable[[], None]) -> None:
        self.setup = setup      # runs before each timed invocation, untimed
        self.command = command  # the work being measured
+
+
def print_offset(text: str, indent_length: int = 4) -> None:
    """Print *text* indented by *indent_length* spaces, with blank lines around it."""
    indented = textwrap.indent(text, ' ' * indent_length)
    print('\n{}\n'.format(indented))
+
+
def delete_folder(folder_path: str) -> None:
    """Delete *folder_path* and its contents; ignore a path that does not exist."""
    if not os.path.exists(folder_path):
        return
    shutil.rmtree(folder_path)
+
+
def execute(command: List[str]) -> None:
    """Run *command* through the shell; dump diagnostics and raise on failure.

    NOTE(review): arguments are space-joined with no quoting, so this only
    works for argv entries free of spaces and shell metacharacters.
    """
    proc = subprocess.Popen(' '.join(command),
                            shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out_bytes, err_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
    if proc.returncode == 0:
        return
    print('EXECUTED COMMAND:', repr(command))
    print('RETURN CODE:', proc.returncode)
    print()
    print('STDOUT:')
    print_offset(out_bytes.decode('utf-8'))
    print('STDERR:')
    print_offset(err_bytes.decode('utf-8'))
    raise RuntimeError('Unexpected error from external tool.')
+
+
def trial(num_trials: int, command: Command) -> List[float]:
    """Run `command` *num_trials* times and return each run's wall-clock seconds.

    The setup callback executes before every timed invocation and is
    excluded from the measurement.
    """
    durations = []  # type: List[float]
    for _ in range(num_trials):
        command.setup()
        begin = time.time()
        command.command()
        durations.append(time.time() - begin)
    return durations
+
+
def report(name: str, times: List[float]) -> None:
    """Print a small statistics summary (raw times, mean, stdev) for *name*."""
    summary = [
        "{}:".format(name),
        "  Times: {}".format(times),
        "  Mean:  {}".format(statistics.mean(times)),
        "  Stdev: {}".format(statistics.stdev(times)),
        "",
    ]
    print('\n'.join(summary))
+
+
def main() -> None:
    """Benchmark mypy self-checking: full run vs cold-cache vs warm-cache incremental."""
    num_trials = 3

    print("Testing baseline")
    baseline_times = trial(num_trials, Command(
        lambda: None,
        lambda: execute(["python3", "-m", "mypy", "mypy"])))
    report("Baseline", baseline_times)

    print("Testing cold cache")
    cold_times = trial(num_trials, Command(
        lambda: delete_folder(".mypy_cache"),
        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
    report("Cold cache", cold_times)

    print("Testing warm cache")
    # Prime the cache once so every timed run below starts warm.
    execute(["python3", "-m", "mypy", "-i", "mypy"])
    warm_times = trial(num_trials, Command(
        lambda: None,
        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"])))
    report("Warm cache", warm_times)
+
diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh
new file mode 100644
index 0000000..3da6b9d
--- /dev/null
+++ b/misc/remove-eol-whitespace.sh
@@ -0,0 +1,8 @@
#!/bin/sh

# Remove trailing whitespace from all non-binary files in a git repo.
# Run from the repository root; files are rewritten in place by sed.

# From https://gist.github.com/dpaluy/3690668; originally from here:
# http://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240

# `git grep -I` skips binary files; -z/-0 NUL-separate names so paths with
# spaces survive the pipe.  The sed expression keeps a trailing CR (\r) intact.
git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/'
diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py
new file mode 100644
index 0000000..9a91bb1
--- /dev/null
+++ b/misc/test_case_to_actual.py
@@ -0,0 +1,71 @@
from typing import Iterator, List, Optional
+import sys
+import os
+import os.path
+
+
class Chunk:
    """One `[header args]` section of a mypy test-data file plus its body lines."""

    def __init__(self, header_type: str, args: str) -> None:
        self.header_type = header_type  # e.g. 'case' or 'file'
        self.args = args                # remainder of the header line
        self.lines = []  # type: List[str]
+
+
def is_header(line: str) -> bool:
    """Return True for section-header lines of the form '[...]'."""
    # Slicing keeps this safe for empty and one-character strings.
    return line[:1] == '[' and len(line) > 1 and line[-1:] == ']'
+
+
def normalize(lines: Iterator[str]) -> Iterator[str]:
    """Lazily yield each line with its trailing whitespace (incl. newline) stripped."""
    for line in lines:
        yield line.rstrip()
+
+
def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
    """Group *lines* into Chunks, one per '[header args]' section.

    Fixes: the accumulator is really Optional[Chunk] (it starts as None),
    and any lines appearing before the first header previously crashed with
    AttributeError on None -- they are now ignored.
    """
    current_chunk = None  # type: Optional[Chunk]
    for line in normalize(lines):
        if is_header(line):
            if current_chunk is not None:
                yield current_chunk
            # '[type args...]' -> header type plus the (possibly empty) rest.
            parts = line[1:-1].split(' ', 1)
            args = parts[1] if len(parts) > 1 else ''
            current_chunk = Chunk(parts[0], args)
        elif current_chunk is not None:
            current_chunk.lines.append(line)
    if current_chunk is not None:
        yield current_chunk
+
+
def write_out(filename: str, lines: List[str]) -> None:
    """Write *lines* to *filename*, newline-joined, creating parent directories."""
    parent_dir = os.path.dirname(filename)
    os.makedirs(parent_dir, exist_ok=True)
    with open(filename, 'w') as stream:
        stream.write('\n'.join(lines))
+
+
def write_tree(root: str, chunks: Iterator[Chunk]) -> None:
    """Materialize chunks on disk under *root*.

    The leading 'case' chunk becomes <root>/<case-name>/main.py; every
    following 'file' chunk whose argument ends in '.py' becomes its own file
    under the same directory.  Other chunk types are skipped.
    """
    case_chunk = next(chunks)
    assert case_chunk.header_type == 'case'

    case_root = os.path.join(root, case_chunk.args)
    write_out(os.path.join(case_root, 'main.py'), case_chunk.lines)

    for chunk in chunks:
        is_py_file = chunk.header_type == 'file' and chunk.args.endswith('.py')
        if is_py_file:
            write_out(os.path.join(case_root, chunk.args), chunk.lines)
+
+
def help() -> None:  # NOTE(review): shadows the builtin help() within this module
    """Print command-line usage for this script."""
    print("Usage: python misc/test_case_to_actual.py test_file.txt root_path")
+
+
def main() -> None:
    """Entry point: expand one test-data file into a real directory tree."""
    if len(sys.argv) != 3:
        help()
        return

    test_file_path, root_path = sys.argv[1:3]
    with open(test_file_path, 'r') as stream:
        write_tree(root_path, produce_chunks(iter(stream)))


if __name__ == '__main__':
    main()
diff --git a/misc/touch_checker.py b/misc/touch_checker.py
new file mode 100644
index 0000000..c44afe4
--- /dev/null
+++ b/misc/touch_checker.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+from typing import Callable, List, Tuple, Optional
+
+import sys
+import glob
+import os
+import shutil
+import statistics
+import subprocess
+import textwrap
+import time
+
+
def print_offset(text: str, indent_length: int = 4) -> None:
    """Print *text*, indented *indent_length* spaces, between blank lines."""
    body = textwrap.indent(text, indent_length * ' ')
    print('')
    print(body)
    print('')
+
def delete_folder(folder_path: str) -> None:
    """Remove *folder_path* recursively when present; otherwise do nothing."""
    if not os.path.exists(folder_path):
        return
    shutil.rmtree(folder_path)
+
+
def execute(command: List[str]) -> None:
    """Run *command* through the shell; on failure, print diagnostics.

    Unlike incremental_checker/perf_checker's execute(), this variant does
    NOT raise on a non-zero exit code -- it only reports and returns, so a
    single failed mypy run does not abort the whole timing sweep.
    """
    proc = subprocess.Popen(' '.join(command),
                            shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out_bytes, err_bytes = proc.communicate()  # type: Tuple[bytes, bytes]
    if proc.returncode == 0:
        return
    print('EXECUTED COMMAND:', repr(command))
    print('RETURN CODE:', proc.returncode)
    print()
    print('STDOUT:')
    print_offset(out_bytes.decode('utf-8'))
    print('STDERR:')
    print_offset(err_bytes.decode('utf-8'))
    print()
+
+
+Command = Callable[[], None]
+
+
+def test(setup: Command, command: Command, teardown: Command) -> float:
+    setup()
+    start = time.time()
+    command()
+    end = time.time() - start
+    teardown()
+    return end
+
+
def make_touch_wrappers(filename: str) -> Tuple[Command, Command]:
    """Build (setup, teardown) callbacks that touch *filename* before a run."""
    def setup() -> None:
        execute(["touch", filename])

    def teardown() -> None:
        pass  # touching needs no cleanup

    return setup, teardown
+
+
def make_change_wrappers(filename: str) -> Tuple[Command, Command]:
    """Build (setup, teardown) callbacks that append code to *filename*.

    setup snapshots the file and appends a trivial statement; teardown
    restores the snapshot and re-runs mypy incrementally so the on-disk
    cache matches the restored source again.
    """
    copy = None  # type: Optional[str]

    def setup() -> None:
        nonlocal copy
        with open(filename, 'r') as stream:
            copy = stream.read()
        with open(filename, 'a') as stream:
            stream.write('\n\nfoo = 3')

    def teardown() -> None:
        assert copy is not None
        with open(filename, 'w') as stream:
            stream.write(copy)

        # Re-run to reset cache.  (Fix: a stray trailing comma previously
        # wrapped this call in a throwaway 1-tuple.)
        execute(["python3", "-m", "mypy", "-i", "mypy"])

    return setup, teardown
+
def main() -> None:
    """Measure how long incremental mypy takes to react to each source file.

    Mode 'touch' only updates mtimes; 'change' actually edits each file
    (and restores it afterwards).
    """
    if len(sys.argv) != 2 or sys.argv[1] not in {'touch', 'change'}:
        print("First argument should be 'touch' or 'change'")
        return

    if sys.argv[1] == 'touch':
        make_wrappers = make_touch_wrappers
        verb = "Touching"
    elif sys.argv[1] == 'change':
        make_wrappers = make_change_wrappers
        verb = "Changing"
    else:
        raise AssertionError()  # unreachable: argv was validated above

    print("Setting up...")

    # Reference timings: full run, cold incremental run, warm incremental run.
    baseline = test(
        lambda: None,
        lambda: execute(["python3", "-m", "mypy", "mypy"]),
        lambda: None)
    print("Baseline:   {}".format(baseline))

    cold = test(
        lambda: delete_folder(".mypy_cache"),
        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
        lambda: None)
    print("Cold cache: {}".format(cold))

    warm = test(
        lambda: None,
        lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
        lambda: None)
    print("Warm cache: {}".format(warm))

    print()

    # Perturb each source file in turn and time the incremental re-check.
    deltas = []
    for filename in glob.iglob("mypy/**/*.py", recursive=True):
        print("{} {}".format(verb, filename))

        setup, teardown = make_wrappers(filename)
        delta = test(
            setup,
            lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]),
            teardown)
        print("    Time: {}".format(delta))
        deltas.append(delta)
    print()

    print("Initial:")
    print("    Baseline:   {}".format(baseline))
    print("    Cold cache: {}".format(cold))
    print("    Warm cache: {}".format(warm))
    print()
    print("Aggregate:")
    print("    Times:      {}".format(deltas))
    print("    Mean:       {}".format(statistics.mean(deltas)))
    print("    Median:     {}".format(statistics.median(deltas)))
    print("    Stdev:      {}".format(statistics.stdev(deltas)))
    print("    Min:        {}".format(min(deltas)))
    print("    Max:        {}".format(max(deltas)))
    print("    Total:      {}".format(sum(deltas)))
    print()

if __name__ == '__main__':
    main()
+
diff --git a/misc/variadics.py b/misc/variadics.py
new file mode 100644
index 0000000..9200288
--- /dev/null
+++ b/misc/variadics.py
@@ -0,0 +1,54 @@
+"""Example of code generation approach to variadics.
+
+See https://github.com/python/typing/issues/193#issuecomment-236383893
+"""
+
+LIMIT = 5
+BOUND = 'object'
+
def prelude(limit: int, bound: str) -> None:
    """Print the typing import plus TypeVar declarations Ts, R, T1..T<limit>.

    Bug fix: the loop previously iterated over the module constant LIMIT,
    silently ignoring the `limit` parameter.
    """
    print('from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload')
    print('Ts = TypeVar(\'Ts\', bound={bound})'.format(bound=bound))
    print('R = TypeVar(\'R\')')
    for i in range(limit):
        print('T{i} = TypeVar(\'T{i}\', bound={bound})'.format(i=i+1, bound=bound))
+
def expand_template(template: str,
                    arg_template: str = 'arg{i}: {Ts}',
                    lower: int = 0,
                    limit: int = LIMIT) -> None:
    """Print @overload stubs of `template` for arities `lower`..`limit`-1.

    `template` may reference {Ts} (a comma-separated TypeVar list) and
    {argsTs} (the argument list built from `arg_template`).  A final
    catch-all overload using a homogeneous 'Ts' plus *args is appended.
    """
    print()
    for i in range(lower, limit):
        # Arity-i overload: type variables T1..Ti and the matching arguments.
        tvs = ', '.join('T{i}'.format(i=j+1) for j in range(i))
        args = ', '.join(arg_template.format(i=j+1, Ts='T{}'.format(j+1))
                         for j in range(i))
        print('@overload')
        s = template.format(Ts=tvs, argsTs=args)
        # An empty TypeVar list must render as Tuple[()], the empty-tuple type.
        s = s.replace('Tuple[]', 'Tuple[()]')
        print(s)
    # Catch-all overload: `limit` positional args plus a star-arg, all typed Ts.
    args_l = [arg_template.format(i=j+1, Ts='Ts') for j in range(limit)]
    args_l.append('*' + (arg_template.format(i='s', Ts='Ts')))
    args = ', '.join(args_l)
    s = template.format(Ts='Ts, ...', argsTs=args)
    # Callable cannot take 'Ts, ...' as a parameter list; use Callable[...] instead.
    s = s.replace('Callable[[Ts, ...]', 'Callable[...')
    print('@overload')
    print(s)
+
def main() -> None:
    """Print generated variadic stubs for each example template."""
    prelude(LIMIT, BOUND)

    # map()
    expand_template('def map(func: Callable[[{Ts}], R], {argsTs}) -> R: ...',
                    lower=1)
    # zip()
    expand_template('def zip({argsTs}) -> Tuple[{Ts}]: ...')

    # Naomi's examples
    expand_template('def my_zip({argsTs}) -> Iterator[Tuple[{Ts}]]: ...',
                    'arg{i}: Iterable[{Ts}]')
    expand_template('def make_check({argsTs}) -> Callable[[{Ts}], bool]: ...')
    expand_template('def my_map(f: Callable[[{Ts}], R], {argsTs}) -> Iterator[R]: ...',
                    'arg{i}: Iterable[{Ts}]')


main()
diff --git a/mypy.egg-info/PKG-INFO b/mypy.egg-info/PKG-INFO
index c9cf741..236181d 100644
--- a/mypy.egg-info/PKG-INFO
+++ b/mypy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: mypy
-Version: 0.470
+Version: 0.480.dev0
 Summary: Optional static typing for Python
 Home-page: http://www.mypy-lang.org/
 Author: Jukka Lehtosalo
diff --git a/mypy.egg-info/SOURCES.txt b/mypy.egg-info/SOURCES.txt
index 6497a7c..5694d26 100644
--- a/mypy.egg-info/SOURCES.txt
+++ b/mypy.egg-info/SOURCES.txt
@@ -1,6 +1,70 @@
+.gitignore
+.gitmodules
+.travis.yml
+CONTRIBUTING.md
+CREDITS
+LICENSE
 MANIFEST.in
+README.md
+appveyor.yml
+build-requirements.txt
+conftest.py
+mypy_self_check.ini
+mypy_strict_optional.ini
+pytest.ini
+runtests.py
 setup.cfg
 setup.py
+test-requirements.txt
+typeshed
+docs/Makefile
+docs/README.md
+docs/make.bat
+docs/requirements-docs.txt
+docs/source/additional_features.rst
+docs/source/basics.rst
+docs/source/builtin_types.rst
+docs/source/casts.rst
+docs/source/cheat_sheet.rst
+docs/source/cheat_sheet_py3.rst
+docs/source/class_basics.rst
+docs/source/command_line.rst
+docs/source/common_issues.rst
+docs/source/conf.py
+docs/source/config_file.rst
+docs/source/duck_type_compatibility.rst
+docs/source/dynamic_typing.rst
+docs/source/faq.rst
+docs/source/function_overloading.rst
+docs/source/generics.rst
+docs/source/getting_started.rst
+docs/source/index.rst
+docs/source/introduction.rst
+docs/source/kinds_of_types.rst
+docs/source/python2.rst
+docs/source/python36.rst
+docs/source/revision_history.rst
+docs/source/supported_python_features.rst
+docs/source/type_inference_and_annotations.rst
+extensions/README.md
+extensions/mypy_extensions.py
+extensions/setup.py
+lib-typing/2.7/setup.py
+lib-typing/2.7/test_typing.py
+lib-typing/2.7/typing.py
+lib-typing/3.2/test_typing.py
+lib-typing/3.2/typing.py
+misc/actions_stubs.py
+misc/analyze_cache.py
+misc/async_matrix.py
+misc/fix_annotate.py
+misc/incremental_checker.py
+misc/macs.el
+misc/perf_checker.py
+misc/remove-eol-whitespace.sh
+misc/test_case_to_actual.py
+misc/touch_checker.py
+misc/variadics.py
 mypy/__init__.py
 mypy/__main__.py
 mypy/api.py
@@ -51,6 +115,7 @@ mypy/treetransform.py
 mypy/typeanal.py
 mypy/typefixture.py
 mypy/types.py
+mypy/typevars.py
 mypy/util.py
 mypy/version.py
 mypy/visitor.py
@@ -60,10 +125,212 @@ mypy.egg-info/SOURCES.txt
 mypy.egg-info/dependency_links.txt
 mypy.egg-info/requires.txt
 mypy.egg-info/top_level.txt
+mypy/myunit/__init__.py
+mypy/myunit/__main__.py
+mypy/test/__init__.py
+mypy/test/collect.py
+mypy/test/config.py
+mypy/test/data.py
+mypy/test/helpers.py
+mypy/test/testargs.py
+mypy/test/testcheck.py
+mypy/test/testcmdline.py
+mypy/test/testextensions.py
+mypy/test/testgraph.py
+mypy/test/testinfer.py
+mypy/test/testlex.py
+mypy/test/testmoduleinfo.py
+mypy/test/testparse.py
+mypy/test/testpythoneval.py
+mypy/test/testreports.py
+mypy/test/testsemanal.py
+mypy/test/testsolve.py
+mypy/test/teststubgen.py
+mypy/test/testsubtypes.py
+mypy/test/testtransform.py
+mypy/test/testtypegen.py
+mypy/test/testtypes.py
+mypy/test/update.py
+pinfer/.gitignore
+pinfer/LICENSE
+pinfer/README
+pinfer/__init__.py
+pinfer/inspect3.py
+pinfer/p.py
+pinfer/pinfer.py
+pinfer/test_pinfer.py
+pinfer/test_pinfer3.py
+pinfer/unparse.py
+pinfer/unparse3.py
 scripts/find_type.py
 scripts/mypy
 scripts/mypy.bat
 scripts/stubgen
+test-data/.flake8
+test-data/samples/bottles.py
+test-data/samples/class.py
+test-data/samples/cmdline.py
+test-data/samples/crawl.py
+test-data/samples/crawl2.py
+test-data/samples/dict.py
+test-data/samples/fib.py
+test-data/samples/files.py
+test-data/samples/for.py
+test-data/samples/generators.py
+test-data/samples/greet.py
+test-data/samples/guess.py
+test-data/samples/hello.py
+test-data/samples/input.py
+test-data/samples/itertool.py
+test-data/samples/readme.txt
+test-data/samples/regexp.py
+test-data/stdlib-samples/3.2/base64.py
+test-data/stdlib-samples/3.2/fnmatch.py
+test-data/stdlib-samples/3.2/genericpath.py
+test-data/stdlib-samples/3.2/getopt.py
+test-data/stdlib-samples/3.2/glob.py
+test-data/stdlib-samples/3.2/posixpath.py
+test-data/stdlib-samples/3.2/pprint.py
+test-data/stdlib-samples/3.2/random.py
+test-data/stdlib-samples/3.2/shutil.py
+test-data/stdlib-samples/3.2/subprocess.py
+test-data/stdlib-samples/3.2/tempfile.py
+test-data/stdlib-samples/3.2/textwrap.py
+test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
+test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
+test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
+test-data/stdlib-samples/3.2/test/__init__.py
+test-data/stdlib-samples/3.2/test/randv2_32.pck
+test-data/stdlib-samples/3.2/test/randv2_64.pck
+test-data/stdlib-samples/3.2/test/randv3.pck
+test-data/stdlib-samples/3.2/test/support.py
+test-data/stdlib-samples/3.2/test/test_base64.py
+test-data/stdlib-samples/3.2/test/test_fnmatch.py
+test-data/stdlib-samples/3.2/test/test_genericpath.py
+test-data/stdlib-samples/3.2/test/test_getopt.py
+test-data/stdlib-samples/3.2/test/test_glob.py
+test-data/stdlib-samples/3.2/test/test_posixpath.py
+test-data/stdlib-samples/3.2/test/test_pprint.py
+test-data/stdlib-samples/3.2/test/test_random.py
+test-data/stdlib-samples/3.2/test/test_set.py
+test-data/stdlib-samples/3.2/test/test_shutil.py
+test-data/stdlib-samples/3.2/test/test_subprocess.py
+test-data/stdlib-samples/3.2/test/test_tempfile.py
+test-data/stdlib-samples/3.2/test/test_textwrap.py
+test-data/stdlib-samples/3.2/test/tf_inherit_check.py
+test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
+test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
+test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
+test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
+test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
+test-data/unit/check-abstract.test
+test-data/unit/check-async-await.test
+test-data/unit/check-basic.test
+test-data/unit/check-bound.test
+test-data/unit/check-callable.test
+test-data/unit/check-class-namedtuple.test
+test-data/unit/check-classes.test
+test-data/unit/check-columns.test
+test-data/unit/check-dynamic-typing.test
+test-data/unit/check-expressions.test
+test-data/unit/check-fastparse.test
+test-data/unit/check-flags.test
+test-data/unit/check-functions.test
+test-data/unit/check-generic-subtyping.test
+test-data/unit/check-generics.test
+test-data/unit/check-ignore.test
+test-data/unit/check-incremental.test
+test-data/unit/check-inference-context.test
+test-data/unit/check-inference.test
+test-data/unit/check-isinstance.test
+test-data/unit/check-kwargs.test
+test-data/unit/check-lists.test
+test-data/unit/check-modules.test
+test-data/unit/check-multiple-inheritance.test
+test-data/unit/check-namedtuple.test
+test-data/unit/check-newsyntax.test
+test-data/unit/check-newtype.test
+test-data/unit/check-optional.test
+test-data/unit/check-overloading.test
+test-data/unit/check-python2.test
+test-data/unit/check-selftype.test
+test-data/unit/check-semanal-error.test
+test-data/unit/check-statements.test
+test-data/unit/check-super.test
+test-data/unit/check-tuples.test
+test-data/unit/check-type-aliases.test
+test-data/unit/check-type-checks.test
+test-data/unit/check-type-promotion.test
+test-data/unit/check-typeddict.test
+test-data/unit/check-typevar-values.test
+test-data/unit/check-underscores.test
+test-data/unit/check-unions.test
+test-data/unit/check-unreachable-code.test
+test-data/unit/check-unsupported.test
+test-data/unit/check-varargs.test
+test-data/unit/check-warnings.test
+test-data/unit/cmdline.test
+test-data/unit/parse-errors.test
+test-data/unit/parse-python2.test
+test-data/unit/parse.test
+test-data/unit/python2eval.test
+test-data/unit/pythoneval-asyncio.test
+test-data/unit/pythoneval-enum.test
+test-data/unit/pythoneval.test
+test-data/unit/semanal-abstractclasses.test
+test-data/unit/semanal-basic.test
+test-data/unit/semanal-classes.test
+test-data/unit/semanal-errors.test
+test-data/unit/semanal-expressions.test
+test-data/unit/semanal-modules.test
+test-data/unit/semanal-namedtuple.test
+test-data/unit/semanal-python2.test
+test-data/unit/semanal-statements.test
+test-data/unit/semanal-symtable.test
+test-data/unit/semanal-typealiases.test
+test-data/unit/semanal-typeddict.test
+test-data/unit/semanal-typeinfo.test
+test-data/unit/semanal-types.test
+test-data/unit/stubgen.test
+test-data/unit/typexport-basic.test
+test-data/unit/fixtures/__new__.pyi
+test-data/unit/fixtures/alias.pyi
+test-data/unit/fixtures/args.pyi
+test-data/unit/fixtures/async_await.pyi
+test-data/unit/fixtures/bool.pyi
+test-data/unit/fixtures/callable.pyi
+test-data/unit/fixtures/classmethod.pyi
+test-data/unit/fixtures/complex.pyi
+test-data/unit/fixtures/dict.pyi
+test-data/unit/fixtures/exception.pyi
+test-data/unit/fixtures/for.pyi
+test-data/unit/fixtures/function.pyi
+test-data/unit/fixtures/isinstance.pyi
+test-data/unit/fixtures/isinstancelist.pyi
+test-data/unit/fixtures/list.pyi
+test-data/unit/fixtures/module.pyi
+test-data/unit/fixtures/module_all.pyi
+test-data/unit/fixtures/module_all_python2.pyi
+test-data/unit/fixtures/ops.pyi
+test-data/unit/fixtures/primitives.pyi
+test-data/unit/fixtures/property.pyi
+test-data/unit/fixtures/python2.pyi
+test-data/unit/fixtures/set.pyi
+test-data/unit/fixtures/slice.pyi
+test-data/unit/fixtures/staticmethod.pyi
+test-data/unit/fixtures/transform.pyi
+test-data/unit/fixtures/tuple-simple.pyi
+test-data/unit/fixtures/tuple.pyi
+test-data/unit/fixtures/union.pyi
+test-data/unit/lib-stub/__builtin__.pyi
+test-data/unit/lib-stub/abc.pyi
+test-data/unit/lib-stub/builtins.pyi
+test-data/unit/lib-stub/collections.pyi
+test-data/unit/lib-stub/mypy_extensions.pyi
+test-data/unit/lib-stub/sys.pyi
+test-data/unit/lib-stub/types.pyi
+test-data/unit/lib-stub/typing.pyi
+tmp-test-dirs/.gitignore
 typeshed/stdlib/2/BaseHTTPServer.pyi
 typeshed/stdlib/2/ConfigParser.pyi
 typeshed/stdlib/2/Cookie.pyi
@@ -176,7 +443,6 @@ typeshed/stdlib/2/strop.pyi
 typeshed/stdlib/2/struct.pyi
 typeshed/stdlib/2/subprocess.pyi
 typeshed/stdlib/2/sys.pyi
-typeshed/stdlib/2/syslog.pyi
 typeshed/stdlib/2/tempfile.pyi
 typeshed/stdlib/2/textwrap.pyi
 typeshed/stdlib/2/thread.pyi
@@ -215,13 +481,8 @@ typeshed/stdlib/2/os/path.pyi
 typeshed/stdlib/2/sqlite3/__init__.pyi
 typeshed/stdlib/2/sqlite3/dbapi2.pyi
 typeshed/stdlib/2/wsgiref/__init__.pyi
+typeshed/stdlib/2/wsgiref/types.pyi
 typeshed/stdlib/2/wsgiref/validate.pyi
-typeshed/stdlib/2/xml/__init__.pyi
-typeshed/stdlib/2/xml/etree/ElementInclude.pyi
-typeshed/stdlib/2/xml/etree/ElementPath.pyi
-typeshed/stdlib/2/xml/etree/ElementTree.pyi
-typeshed/stdlib/2/xml/etree/__init__.pyi
-typeshed/stdlib/2/xml/etree/cElementTree.pyi
 typeshed/stdlib/2and3/_bisect.pyi
 typeshed/stdlib/2and3/_heapq.pyi
 typeshed/stdlib/2and3/argparse.pyi
@@ -252,6 +513,7 @@ typeshed/stdlib/2and3/pstats.pyi
 typeshed/stdlib/2and3/readline.pyi
 typeshed/stdlib/2and3/rlcompleter.pyi
 typeshed/stdlib/2and3/site.pyi
+typeshed/stdlib/2and3/syslog.pyi
 typeshed/stdlib/2and3/tarfile.pyi
 typeshed/stdlib/2and3/termios.pyi
 typeshed/stdlib/2and3/threading.pyi
@@ -310,6 +572,11 @@ typeshed/stdlib/2and3/logging/__init__.pyi
 typeshed/stdlib/2and3/logging/config.pyi
 typeshed/stdlib/2and3/logging/handlers.pyi
 typeshed/stdlib/2and3/xml/__init__.pyi
+typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi
+typeshed/stdlib/2and3/xml/etree/ElementPath.pyi
+typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
+typeshed/stdlib/2and3/xml/etree/__init__.pyi
+typeshed/stdlib/2and3/xml/etree/cElementTree.pyi
 typeshed/stdlib/2and3/xml/sax/__init__.pyi
 typeshed/stdlib/2and3/xml/sax/handler.pyi
 typeshed/stdlib/2and3/xml/sax/saxutils.pyi
@@ -408,19 +675,7 @@ typeshed/stdlib/3/unicodedata.pyi
 typeshed/stdlib/3/uuid.pyi
 typeshed/stdlib/3/weakref.pyi
 typeshed/stdlib/3/zlib.pyi
-typeshed/stdlib/3.2/xml/__init__.pyi
-typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi
-typeshed/stdlib/3.2/xml/etree/ElementPath.pyi
-typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
-typeshed/stdlib/3.2/xml/etree/__init__.pyi
-typeshed/stdlib/3.2/xml/etree/cElementTree.pyi
 typeshed/stdlib/3.3/ipaddress.pyi
-typeshed/stdlib/3.3/xml/__init__.pyi
-typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi
-typeshed/stdlib/3.3/xml/etree/ElementPath.pyi
-typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
-typeshed/stdlib/3.3/xml/etree/__init__.pyi
-typeshed/stdlib/3.3/xml/etree/cElementTree.pyi
 typeshed/stdlib/3.4/_stat.pyi
 typeshed/stdlib/3.4/_tracemalloc.pyi
 typeshed/stdlib/3.4/enum.pyi
@@ -437,18 +692,6 @@ typeshed/stdlib/3.4/asyncio/streams.pyi
 typeshed/stdlib/3.4/asyncio/subprocess.pyi
 typeshed/stdlib/3.4/asyncio/tasks.pyi
 typeshed/stdlib/3.4/asyncio/transports.pyi
-typeshed/stdlib/3.4/xml/__init__.pyi
-typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi
-typeshed/stdlib/3.4/xml/etree/ElementPath.pyi
-typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
-typeshed/stdlib/3.4/xml/etree/__init__.pyi
-typeshed/stdlib/3.4/xml/etree/cElementTree.pyi
-typeshed/stdlib/3.5/xml/__init__.pyi
-typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi
-typeshed/stdlib/3.5/xml/etree/ElementPath.pyi
-typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
-typeshed/stdlib/3.5/xml/etree/__init__.pyi
-typeshed/stdlib/3.5/xml/etree/cElementTree.pyi
 typeshed/stdlib/3.6/secrets.pyi
 typeshed/stdlib/3/collections/__init__.pyi
 typeshed/stdlib/3/collections/abc.pyi
@@ -515,12 +758,8 @@ typeshed/stdlib/3/urllib/request.pyi
 typeshed/stdlib/3/urllib/response.pyi
 typeshed/stdlib/3/urllib/robotparser.pyi
 typeshed/stdlib/3/wsgiref/__init__.pyi
+typeshed/stdlib/3/wsgiref/types.pyi
 typeshed/stdlib/3/wsgiref/validate.pyi
-typeshed/stdlib/3/xml/etree/ElementInclude.pyi
-typeshed/stdlib/3/xml/etree/ElementPath.pyi
-typeshed/stdlib/3/xml/etree/ElementTree.pyi
-typeshed/stdlib/3/xml/etree/__init__.pyi
-typeshed/stdlib/3/xml/etree/cElementTree.pyi
 typeshed/tests/mypy_test.py
 typeshed/tests/pytype_test.py
 typeshed/third_party/2/croniter.pyi
@@ -533,9 +772,19 @@ typeshed/third_party/2/OpenSSL/__init__.pyi
 typeshed/third_party/2/OpenSSL/crypto.pyi
 typeshed/third_party/2/concurrent/__init__.pyi
 typeshed/third_party/2/concurrent/futures/__init__.pyi
+typeshed/third_party/2/cryptography/__init__.pyi
+typeshed/third_party/2/cryptography/hazmat/__init__.pyi
+typeshed/third_party/2/cryptography/hazmat/primitives/__init__.pyi
+typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi
+typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/__init__.pyi
+typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi
+typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi
 typeshed/third_party/2/dateutil/__init__.pyi
 typeshed/third_party/2/dateutil/parser.pyi
 typeshed/third_party/2/dateutil/relativedelta.pyi
+typeshed/third_party/2/dateutil/tz/__init__.pyi
+typeshed/third_party/2/dateutil/tz/_common.pyi
+typeshed/third_party/2/dateutil/tz/tz.pyi
 typeshed/third_party/2/fb303/FacebookService.pyi
 typeshed/third_party/2/fb303/__init__.pyi
 typeshed/third_party/2/google/__init__.pyi
@@ -623,44 +872,6 @@ typeshed/third_party/2/six/moves/urllib/parse.pyi
 typeshed/third_party/2/six/moves/urllib/request.pyi
 typeshed/third_party/2/six/moves/urllib/response.pyi
 typeshed/third_party/2/six/moves/urllib/robotparser.pyi
-typeshed/third_party/2/sqlalchemy/__init__.pyi
-typeshed/third_party/2/sqlalchemy/exc.pyi
-typeshed/third_party/2/sqlalchemy/inspection.pyi
-typeshed/third_party/2/sqlalchemy/log.pyi
-typeshed/third_party/2/sqlalchemy/pool.pyi
-typeshed/third_party/2/sqlalchemy/schema.pyi
-typeshed/third_party/2/sqlalchemy/types.pyi
-typeshed/third_party/2/sqlalchemy/databases/__init__.pyi
-typeshed/third_party/2/sqlalchemy/databases/mysql.pyi
-typeshed/third_party/2/sqlalchemy/dialects/__init__.pyi
-typeshed/third_party/2/sqlalchemy/dialects/mysql/__init__.pyi
-typeshed/third_party/2/sqlalchemy/dialects/mysql/base.pyi
-typeshed/third_party/2/sqlalchemy/engine/__init__.pyi
-typeshed/third_party/2/sqlalchemy/engine/base.pyi
-typeshed/third_party/2/sqlalchemy/engine/strategies.pyi
-typeshed/third_party/2/sqlalchemy/engine/url.pyi
-typeshed/third_party/2/sqlalchemy/orm/__init__.pyi
-typeshed/third_party/2/sqlalchemy/orm/session.pyi
-typeshed/third_party/2/sqlalchemy/sql/__init__.pyi
-typeshed/third_party/2/sqlalchemy/sql/annotation.pyi
-typeshed/third_party/2/sqlalchemy/sql/base.pyi
-typeshed/third_party/2/sqlalchemy/sql/ddl.pyi
-typeshed/third_party/2/sqlalchemy/sql/dml.pyi
-typeshed/third_party/2/sqlalchemy/sql/elements.pyi
-typeshed/third_party/2/sqlalchemy/sql/expression.pyi
-typeshed/third_party/2/sqlalchemy/sql/functions.pyi
-typeshed/third_party/2/sqlalchemy/sql/naming.pyi
-typeshed/third_party/2/sqlalchemy/sql/operators.pyi
-typeshed/third_party/2/sqlalchemy/sql/schema.pyi
-typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
-typeshed/third_party/2/sqlalchemy/sql/sqltypes.pyi
-typeshed/third_party/2/sqlalchemy/sql/type_api.pyi
-typeshed/third_party/2/sqlalchemy/sql/visitors.pyi
-typeshed/third_party/2/sqlalchemy/util/__init__.pyi
-typeshed/third_party/2/sqlalchemy/util/_collections.pyi
-typeshed/third_party/2/sqlalchemy/util/compat.pyi
-typeshed/third_party/2/sqlalchemy/util/deprecations.pyi
-typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi
 typeshed/third_party/2/thrift/Thrift.pyi
 typeshed/third_party/2/thrift/__init__.pyi
 typeshed/third_party/2/thrift/protocol/TBinaryProtocol.pyi
@@ -856,8 +1067,62 @@ typeshed/third_party/2and3/markupsafe/_compat.pyi
 typeshed/third_party/2and3/markupsafe/_constants.pyi
 typeshed/third_party/2and3/markupsafe/_native.pyi
 typeshed/third_party/2and3/markupsafe/_speedups.pyi
+typeshed/third_party/2and3/pymysql/__init__.pyi
+typeshed/third_party/2and3/pymysql/charset.pyi
+typeshed/third_party/2and3/pymysql/connections.pyi
+typeshed/third_party/2and3/pymysql/converters.pyi
+typeshed/third_party/2and3/pymysql/cursors.pyi
+typeshed/third_party/2and3/pymysql/err.pyi
+typeshed/third_party/2and3/pymysql/times.pyi
+typeshed/third_party/2and3/pymysql/util.pyi
+typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi
+typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi
+typeshed/third_party/2and3/pymysql/constants/ER.pyi
+typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi
+typeshed/third_party/2and3/pymysql/constants/FLAG.pyi
+typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi
+typeshed/third_party/2and3/pymysql/constants/__init__.pyi
 typeshed/third_party/2and3/pytz/__init__.pyi
 typeshed/third_party/2and3/pytz/lazy.pyi
+typeshed/third_party/2and3/sqlalchemy/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/exc.pyi
+typeshed/third_party/2and3/sqlalchemy/inspection.pyi
+typeshed/third_party/2and3/sqlalchemy/log.pyi
+typeshed/third_party/2and3/sqlalchemy/pool.pyi
+typeshed/third_party/2and3/sqlalchemy/schema.pyi
+typeshed/third_party/2and3/sqlalchemy/types.pyi
+typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
+typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
+typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
+typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
+typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
+typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
+typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
+typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
+typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
+typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
+typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
+typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
+typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
 typeshed/third_party/3/enum.pyi
 typeshed/third_party/3/itsdangerous.pyi
 typeshed/third_party/3/pkg_resources.pyi
@@ -874,6 +1139,9 @@ typeshed/third_party/3.6/click/utils.pyi
 typeshed/third_party/3/dateutil/__init__.pyi
 typeshed/third_party/3/dateutil/parser.pyi
 typeshed/third_party/3/dateutil/relativedelta.pyi
+typeshed/third_party/3/dateutil/tz/__init__.pyi
+typeshed/third_party/3/dateutil/tz/_common.pyi
+typeshed/third_party/3/dateutil/tz/tz.pyi
 typeshed/third_party/3/docutils/__init__.pyi
 typeshed/third_party/3/docutils/examples.pyi
 typeshed/third_party/3/docutils/nodes.pyi
diff --git a/mypy/api.py b/mypy/api.py
index 80f8d93..f6d0d5a 100644
--- a/mypy/api.py
+++ b/mypy/api.py
@@ -5,11 +5,16 @@ It just mimics command line activation without starting a new interpreter.
 So the normal docs about the mypy command line apply.
 Changes in the command line version of mypy will be immediately useable.
 
-Just import this module and then call the 'run' function with exactly the
-string you would have passed to mypy from the command line.
-Function 'run' returns a tuple of strings: (<normal_report>, <error_report>),
-in which <normal_report> is what mypy normally writes to sys.stdout and
-<error_report> is what mypy normally writes to sys.stderr.
+Just import this module and then call the 'run' function with a parameter of
+type List[str], containing what normally would have been the command line
+arguments to mypy.
+
+Function 'run' returns a Tuple[str, str, int], namely
+(<normal_report>, <error_report>, <exit_status>),
+in which <normal_report> is what mypy normally writes to sys.stdout,
+<error_report> is what mypy normally writes to sys.stderr and exit_status is
+the exit status mypy normally returns to the operating system.
+
 Any pretty formatting is left to the caller.
 
 Trivial example of code using this module:
@@ -17,7 +22,7 @@ Trivial example of code using this module:
 import sys
 from mypy import api
 
-result = api.run(' '.join(sys.argv[1:]))
+result = api.run(sys.argv[1:])
 
 if result[0]:
     print('\nType checking report:\n')
@@ -26,16 +31,18 @@ if result[0]:
 if result[1]:
     print('\nError report:\n')
     print(result[1])  # stderr
+
+print ('\nExit status:', result[2])
 """
 
 import sys
 from io import StringIO
-from typing import Tuple
+from typing import List, Tuple
 from mypy.main import main
 
 
-def run(params: str) -> Tuple[str, str]:
-    sys.argv = [''] + params.split()
+def run(params: List[str]) -> Tuple[str, str, int]:
+    sys.argv = [''] + params
 
     old_stdout = sys.stdout
     new_stdout = StringIO()
@@ -47,10 +54,11 @@ def run(params: str) -> Tuple[str, str]:
 
     try:
         main(None)
-    except SystemExit:
-        pass
+        exit_status = 0
+    except SystemExit as system_exit:
+        exit_status = system_exit.code
 
     sys.stdout = old_stdout
     sys.stderr = old_stderr
 
-    return new_stdout.getvalue(), new_stderr.getvalue()
+    return new_stdout.getvalue(), new_stderr.getvalue(), exit_status
diff --git a/mypy/build.py b/mypy/build.py
index 85d06c8..0f866c8 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -1114,6 +1114,7 @@ class State:
                  caller_state: 'State' = None,
                  caller_line: int = 0,
                  ancestor_for: 'State' = None,
+                 root_source: bool = False,
                  ) -> None:
         assert id or path or source is not None, "Neither id, path nor source given"
         self.manager = manager
@@ -1148,6 +1149,7 @@ class State:
                 # - skip -> don't analyze, make the type Any
                 follow_imports = self.options.follow_imports
                 if (follow_imports != 'normal'
+                    and not root_source  # Honor top-level modules
                     and path.endswith('.py')  # Stubs are always normal
                     and id != 'builtins'  # Builtins is always normal
                     and not (caller_state and
@@ -1519,6 +1521,9 @@ class State:
 def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
     manager.log("Mypy version %s" % __version__)
     graph = load_graph(sources, manager)
+    if not graph:
+        print("Nothing to do?!")
+        return
     manager.log("Loaded graph with %d nodes" % len(graph))
     if manager.options.dump_graph:
         dump_graph(graph)
@@ -1596,7 +1601,8 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
     # Seed the graph with the initial root sources.
     for bs in sources:
         try:
-            st = State(id=bs.module, path=bs.path, source=bs.text, manager=manager)
+            st = State(id=bs.module, path=bs.path, source=bs.text, manager=manager,
+                       root_source=True)
         except ModuleNotFound:
             continue
         if st.id in graph:
diff --git a/mypy/checker.py b/mypy/checker.py
index 6a1e10f..77c8c4b 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -24,17 +24,17 @@ from mypy.nodes import (
     YieldFromExpr, NamedTupleExpr, TypedDictExpr, SetComprehension,
     DictionaryComprehension, ComplexExpr, EllipsisExpr, TypeAliasExpr,
     RefExpr, YieldExpr, BackquoteExpr, ImportFrom, ImportAll, ImportBase,
-    AwaitExpr,
-    ARG_POS,
+    AwaitExpr, PromoteExpr, Node,
+    ARG_POS, MDEF,
     CONTRAVARIANT, COVARIANT)
 from mypy import nodes
 from mypy.types import (
     Type, AnyType, CallableType, Void, FunctionLike, Overloaded, TupleType, TypedDictType,
     Instance, NoneTyp, ErrorType, strip_type, TypeType,
-    UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType,
+    UnionType, TypeVarId, TypeVarType, PartialType, DeletedType, UninhabitedType, TypeVarDef,
     true_only, false_only, function_type
 )
-from mypy.sametypes import is_same_type
+from mypy.sametypes import is_same_type, is_same_types
 from mypy.messages import MessageBuilder
 import mypy.checkexpr
 from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound
@@ -44,7 +44,8 @@ from mypy.subtypes import (
     restrict_subtype_away, is_subtype_ignoring_tvars
 )
 from mypy.maptype import map_instance_to_supertype
-from mypy.semanal import fill_typevars, set_callable_name, refers_to_fullname
+from mypy.typevars import fill_typevars, has_no_typevars
+from mypy.semanal import set_callable_name, refers_to_fullname
 from mypy.erasetype import erase_typevars
 from mypy.expandtype import expand_type, expand_type_by_instance
 from mypy.visitor import NodeVisitor
@@ -440,20 +441,6 @@ class TypeChecker(NodeVisitor[Type]):
             # Supertype of Generator (Iterator, Iterable, object): tr is any.
             return AnyType()
 
-    def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type:
-        """Check the argument to `await` and extract the type of value.
-
-        Also used by `async for` and `async with`.
-        """
-        if not self.check_subtype(t, self.named_type('typing.Awaitable'), ctx,
-                                  msg, 'actual type', 'expected type'):
-            return AnyType()
-        else:
-            echk = self.expr_checker
-            method = echk.analyze_external_member_access('__await__', t, ctx)
-            generator = echk.check_call(method, [], [], ctx)[0]
-            return self.get_generator_return_type(generator, False)
-
     def visit_func_def(self, defn: FuncDef) -> Type:
         """Type check a function definition."""
         self.check_func_item(defn, name=defn.name())
@@ -572,7 +559,7 @@ class TypeChecker(NodeVisitor[Type]):
 
                 if name in nodes.reverse_op_method_set:
                     self.check_reverse_op_method(item, typ, name)
-                elif name == '__getattr__':
+                elif name in ('__getattr__', '__getattribute__'):
                     self.check_getattr_method(typ, defn)
 
                 # Refuse contravariant return type variable
@@ -654,7 +641,7 @@ class TypeChecker(NodeVisitor[Type]):
 
             if (self.options.warn_no_return and not unreachable
                     and not isinstance(self.return_types[-1], (Void, NoneTyp, AnyType))
-                    and not defn.is_generator):
+                    and (defn.is_coroutine or not defn.is_generator)):
                 # Control flow fell off the end of a function that was
                 # declared to return a non-None type.
                 # Allow functions that are entirely pass/Ellipsis.
@@ -1116,6 +1103,12 @@ class TypeChecker(NodeVisitor[Type]):
                                                       infer_lvalue_type)
         else:
             lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue)
+
+            if isinstance(lvalue, NameExpr):
+                if self.check_compatibility_all_supers(lvalue, lvalue_type, rvalue):
+                    # We hit an error on this line; don't check for any others
+                    return
+
             if lvalue_type:
                 if isinstance(lvalue_type, PartialType) and lvalue_type.type is None:
                     # Try to infer a proper type for a variable with a partial None type.
@@ -1164,12 +1157,129 @@ class TypeChecker(NodeVisitor[Type]):
                                             lvalue_type,
                                             False)
             elif index_lvalue:
-                self.check_indexed_assignment(index_lvalue, rvalue, rvalue)
+                self.check_indexed_assignment(index_lvalue, rvalue, lvalue)
 
             if inferred:
                 self.infer_variable_type(inferred, lvalue, self.accept(rvalue),
                                          rvalue)
 
+    def check_compatibility_all_supers(self, lvalue: NameExpr, lvalue_type: Type,
+                                       rvalue: Expression) -> bool:
+        lvalue_node = lvalue.node
+
+        # Check if we are a class variable with at least one base class
+        if (isinstance(lvalue_node, Var) and
+                lvalue.kind == MDEF and
+                len(lvalue_node.info.bases) > 0):
+
+            for base in lvalue_node.info.mro[1:]:
+                # Only check __slots__ against the 'object'
+                # If a base class defines a Tuple of 3 elements, a child of
+                # this class should not be allowed to define it as a Tuple of
+                # anything other than 3 elements. The exception to this rule
+                # is __slots__, where it is allowed for any child class to
+                # redefine it.
+                if lvalue_node.name() == "__slots__" and base.fullname() != "builtins.object":
+                    continue
+
+                base_type, base_node = self.lvalue_type_from_base(lvalue_node, base)
+
+                if base_type:
+                    if not self.check_compatibility_super(lvalue,
+                                                          lvalue_type,
+                                                          rvalue,
+                                                          base,
+                                                          base_type,
+                                                          base_node):
+                        # Only show one error per variable; even if other
+                        # base classes are also incompatible
+                        return True
+                    break
+        return False
+
+    def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue: Expression,
+                                  base: TypeInfo, base_type: Type, base_node: Node) -> bool:
+        lvalue_node = lvalue.node
+        assert isinstance(lvalue_node, Var)
+
+        # Do not check whether the rvalue is compatible if the
+        # lvalue had a type defined; this is handled by other
+        # parts, and all we have to worry about in that case is
+        # that lvalue is compatible with the base class.
+        compare_node = None  # type: Node
+        if lvalue_type:
+            compare_type = lvalue_type
+            compare_node = lvalue.node
+        else:
+            compare_type = self.accept(rvalue, base_type)
+            if isinstance(rvalue, NameExpr):
+                compare_node = rvalue.node
+                if isinstance(compare_node, Decorator):
+                    compare_node = compare_node.func
+
+        if compare_type:
+            if (isinstance(base_type, CallableType) and
+                    isinstance(compare_type, CallableType)):
+                base_static = is_node_static(base_node)
+                compare_static = is_node_static(compare_node)
+
+                # In case compare_static is unknown, also check
+                # if 'definition' is set. The most common case for
+                # this is with TempNode(), where we lose all
+                # information about the real rvalue node (but only get
+                # the rvalue type)
+                if compare_static is None and compare_type.definition:
+                    compare_static = is_node_static(compare_type.definition)
+
+                # Compare against False, as is_node_static can return None
+                if base_static is False and compare_static is False:
+                    # Class-level function objects and classmethods become bound
+                    # methods: the former to the instance, the latter to the
+                    # class
+                    base_type = bind_self(base_type, self.scope.active_class())
+                    compare_type = bind_self(compare_type, self.scope.active_class())
+
+                # If we are a static method, ensure to also tell the
+                # lvalue it now contains a static method
+                if base_static and compare_static:
+                    lvalue_node.is_staticmethod = True
+
+            return self.check_subtype(compare_type, base_type, lvalue,
+                                      messages.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
+                                      'expression has type',
+                                      'base class "%s" defined the type as' % base.name())
+        return True
+
+    def lvalue_type_from_base(self, expr_node: Var,
+                              base: TypeInfo) -> Tuple[Optional[Type], Optional[Node]]:
+        """For a NameExpr that is part of a class, walk all base classes and try
+        to find the first class that defines a Type for the same name."""
+        expr_name = expr_node.name()
+        base_var = base.names.get(expr_name)
+
+        if base_var:
+            base_node = base_var.node
+            base_type = base_var.type
+            if isinstance(base_node, Decorator):
+                base_node = base_node.func
+                base_type = base_node.type
+
+            if base_type:
+                if not has_no_typevars(base_type):
+                    instance = cast(Instance, self.scope.active_class())
+                    itype = map_instance_to_supertype(instance, base)
+                    base_type = expand_type_by_instance(base_type, itype)
+
+                if isinstance(base_type, CallableType) and isinstance(base_node, FuncDef):
+                    # If we are a property, return the Type of the return
+                    # value, not the Callable
+                    if base_node.is_property:
+                        base_type = base_type.ret_type
+
+                return base_type, base_node
+
+        return None, None
+
     def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Expression,
                                              context: Context,
                                              infer_lvalue_type: bool = True) -> None:
@@ -1633,8 +1743,10 @@ class TypeChecker(NodeVisitor[Type]):
                         context=s,
                         msg=messages.INCOMPATIBLE_RETURN_VALUE_TYPE)
             else:
-                # Empty returns are valid in Generators with Any typed returns.
-                if (defn.is_generator and isinstance(return_type, AnyType)):
+                # Empty returns are valid in Generators with Any typed returns, but not in
+                # coroutines.
+                if (defn.is_generator and not defn.is_coroutine and
+                        isinstance(return_type, AnyType)):
                     return
 
                 if isinstance(return_type, (Void, NoneTyp, AnyType)):
@@ -1651,6 +1763,15 @@ class TypeChecker(NodeVisitor[Type]):
             for e, b in zip(s.expr, s.body):
                 t = self.accept(e)
                 self.check_usable_type(t, e)
+
+                if isinstance(t, DeletedType):
+                    self.msg.deleted_as_rvalue(t, s)
+
+                if self.options.strict_boolean:
+                    is_bool = isinstance(t, Instance) and t.type.fullname() == 'builtins.bool'
+                    if not (is_bool or isinstance(t, AnyType)):
+                        self.fail(messages.NON_BOOLEAN_IN_CONDITIONAL, e)
+
                 if_map, else_map = self.find_isinstance_check(e)
 
                 # XXX Issue a warning if condition is always False?
@@ -1668,7 +1789,9 @@ class TypeChecker(NodeVisitor[Type]):
 
     def visit_while_stmt(self, s: WhileStmt) -> Type:
         """Type check a while statement."""
-        self.accept_loop(IfStmt([s.expr], [s.body], None), s.else_body,
+        if_stmt = IfStmt([s.expr], [s.body], None)
+        if_stmt.set_line(s.get_line(), s.get_column())
+        self.accept_loop(if_stmt, s.else_body,
                          exit_condition=s.expr)
         return None
 
@@ -1690,6 +1813,13 @@ class TypeChecker(NodeVisitor[Type]):
     def visit_assert_stmt(self, s: AssertStmt) -> Type:
         self.accept(s.expr)
 
+        if s.msg is not None:
+            self.accept(s.msg)
+
+        if self.options.fast_parser:
+            if isinstance(s.expr, TupleExpr) and len(s.expr.items) > 0:
+                self.warn(messages.MALFORMED_ASSERT, s)
+
         # If this is asserting some isinstance check, bind that type in the following code
         true_map, _ = self.find_isinstance_check(s.expr)
 
@@ -1856,7 +1986,7 @@ class TypeChecker(NodeVisitor[Type]):
             item_type = self.analyze_async_iterable_item_type(s.expr)
         else:
             item_type = self.analyze_iterable_item_type(s.expr)
-        self.analyze_index_variables(s.index, item_type, s)
+        self.analyze_index_variables(s.index, item_type, s.index_type is None, s)
         self.accept_loop(s.body, s.else_body)
         return None
 
@@ -1876,7 +2006,7 @@ class TypeChecker(NodeVisitor[Type]):
         iterator = echk.check_call(method, [], [], expr)[0]
         method = echk.analyze_external_member_access('__anext__', iterator, expr)
         awaitable = echk.check_call(method, [], [], expr)[0]
-        return self.check_awaitable_expr(awaitable, expr,
+        return echk.check_awaitable_expr(awaitable, expr,
                                          messages.INCOMPATIBLE_TYPES_IN_ASYNC_FOR)
 
     def analyze_iterable_item_type(self, expr: Expression) -> Type:
@@ -1915,9 +2045,9 @@ class TypeChecker(NodeVisitor[Type]):
             return echk.check_call(method, [], [], expr)[0]
 
     def analyze_index_variables(self, index: Expression, item_type: Type,
-                                context: Context) -> None:
+                                infer_lvalue_type: bool, context: Context) -> None:
         """Type check or infer for loop or list comprehension index vars."""
-        self.check_assignment(index, self.temp_node(item_type, context))
+        self.check_assignment(index, self.temp_node(item_type, context), infer_lvalue_type)
 
     def visit_del_stmt(self, s: DelStmt) -> Type:
         if isinstance(s.expr, IndexExpr):
@@ -1989,34 +2119,36 @@ class TypeChecker(NodeVisitor[Type]):
     def visit_with_stmt(self, s: WithStmt) -> Type:
         for expr, target in zip(s.expr, s.target):
             if s.is_async:
-                self.check_async_with_item(expr, target)
+                self.check_async_with_item(expr, target, s.target_type is None)
             else:
-                self.check_with_item(expr, target)
+                self.check_with_item(expr, target, s.target_type is None)
         self.accept(s.body)
         return None
 
-    def check_async_with_item(self, expr: Expression, target: Expression) -> None:
+    def check_async_with_item(self, expr: Expression, target: Expression,
+                              infer_lvalue_type: bool) -> None:
         echk = self.expr_checker
         ctx = self.accept(expr)
         enter = echk.analyze_external_member_access('__aenter__', ctx, expr)
         obj = echk.check_call(enter, [], [], expr)[0]
-        obj = self.check_awaitable_expr(
+        obj = echk.check_awaitable_expr(
             obj, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER)
         if target:
-            self.check_assignment(target, self.temp_node(obj, expr))
+            self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
         exit = echk.analyze_external_member_access('__aexit__', ctx, expr)
         arg = self.temp_node(AnyType(), expr)
         res = echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)[0]
-        self.check_awaitable_expr(
+        echk.check_awaitable_expr(
             res, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT)
 
-    def check_with_item(self, expr: Expression, target: Expression) -> None:
+    def check_with_item(self, expr: Expression, target: Expression,
+                        infer_lvalue_type: bool) -> None:
         echk = self.expr_checker
         ctx = self.accept(expr)
         enter = echk.analyze_external_member_access('__enter__', ctx, expr)
         obj = echk.check_call(enter, [], [], expr)[0]
         if target:
-            self.check_assignment(target, self.temp_node(obj, expr))
+            self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
         exit = echk.analyze_external_member_access('__exit__', ctx, expr)
         arg = self.temp_node(AnyType(), expr)
         echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)
@@ -2031,6 +2163,14 @@ class TypeChecker(NodeVisitor[Type]):
                 self.expr_checker.analyze_external_member_access('write', target_type, s.target)
         return None
 
+    def visit_break_stmt(self, s: BreakStmt) -> Type:
+        self.binder.handle_break()
+        return None
+
+    def visit_continue_stmt(self, s: ContinueStmt) -> Type:
+        self.binder.handle_continue()
+        return None
+
     #
     # Expressions
     #
@@ -2042,99 +2182,11 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_call_expr(e)
 
     def visit_yield_from_expr(self, e: YieldFromExpr) -> Type:
-        # NOTE: Whether `yield from` accepts an `async def` decorated
-        # with `@types.coroutine` (or `@asyncio.coroutine`) depends on
-        # whether the generator containing the `yield from` is itself
-        # thus decorated.  But it accepts a generator regardless of
-        # how it's decorated.
-        return_type = self.return_types[-1]
-        subexpr_type = self.accept(e.expr, return_type)
-        iter_type = None  # type: Type
-
-        # Check that the expr is an instance of Iterable and get the type of the iterator produced
-        # by __iter__.
-        if isinstance(subexpr_type, AnyType):
-            iter_type = AnyType()
-        elif (isinstance(subexpr_type, Instance) and
-                is_subtype(subexpr_type, self.named_type('typing.Iterable'))):
-            if self.is_async_def(subexpr_type) and not self.has_coroutine_decorator(return_type):
-                self.msg.yield_from_invalid_operand_type(subexpr_type, e)
-            iter_method_type = self.expr_checker.analyze_external_member_access(
-                '__iter__',
-                subexpr_type,
-                AnyType())
-
-            generic_generator_type = self.named_generic_type('typing.Generator',
-                                                             [AnyType(), AnyType(), AnyType()])
-            iter_type, _ = self.expr_checker.check_call(iter_method_type, [], [],
-                                                        context=generic_generator_type)
-        else:
-            if not (self.is_async_def(subexpr_type) and self.has_coroutine_decorator(return_type)):
-                self.msg.yield_from_invalid_operand_type(subexpr_type, e)
-                iter_type = AnyType()
-            else:
-                iter_type = self.check_awaitable_expr(subexpr_type, e,
-                                                      messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM)
-
-        # Check that the iterator's item type matches the type yielded by the Generator function
-        # containing this `yield from` expression.
-        expected_item_type = self.get_generator_yield_type(return_type, False)
-        actual_item_type = self.get_generator_yield_type(iter_type, False)
-
-        self.check_subtype(actual_item_type, expected_item_type, e,
-                           messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM,
-                           'actual type', 'expected type')
-
-        # Determine the type of the entire yield from expression.
-        if (isinstance(iter_type, Instance) and
-                iter_type.type.fullname() == 'typing.Generator'):
-            return self.get_generator_return_type(iter_type, False)
-        else:
-            # Non-Generators don't return anything from `yield from` expressions.
-            # However special-case Any (which might be produced by an error).
-            if isinstance(actual_item_type, AnyType):
-                return AnyType()
-            else:
-                if experiments.STRICT_OPTIONAL:
-                    return NoneTyp(is_ret_type=True)
-                else:
-                    return Void()
-
-    def has_coroutine_decorator(self, t: Type) -> bool:
-        """Whether t came from a function decorated with `@coroutine`."""
-        return isinstance(t, Instance) and t.type.fullname() == 'typing.AwaitableGenerator'
-
-    def is_async_def(self, t: Type) -> bool:
-        """Whether t came from a function defined using `async def`."""
-        # In check_func_def(), when we see a function decorated with
-        # `@typing.coroutine` or `@async.coroutine`, we change the
-        # return type to typing.AwaitableGenerator[...], so that its
-        # type is compatible with either Generator or Awaitable.
-        # But for the check here we need to know whether the original
-        # function (before decoration) was an `async def`.  The
-        # AwaitableGenerator type conveniently preserves the original
-        # type as its 4th parameter (3rd when using 0-origin indexing
-        # :-), so that we can recover that information here.
-        # (We really need to see whether the original, undecorated
-        # function was an `async def`, which is orthogonal to its
-        # decorations.)
-        if (isinstance(t, Instance)
-                and t.type.fullname() == 'typing.AwaitableGenerator'
-                and len(t.args) >= 4):
-            t = t.args[3]
-        return isinstance(t, Instance) and t.type.fullname() == 'typing.Awaitable'
+        return self.expr_checker.visit_yield_from_expr(e)
 
     def visit_member_expr(self, e: MemberExpr) -> Type:
         return self.expr_checker.visit_member_expr(e)
 
-    def visit_break_stmt(self, s: BreakStmt) -> Type:
-        self.binder.handle_break()
-        return None
-
-    def visit_continue_stmt(self, s: ContinueStmt) -> Type:
-        self.binder.handle_continue()
-        return None
-
     def visit_int_expr(self, e: IntExpr) -> Type:
         return self.expr_checker.visit_int_expr(e)
 
@@ -2184,19 +2236,16 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_type_alias_expr(e)
 
     def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
-        # TODO: Perhaps return a special type used for type variables only?
-        return AnyType()
+        return self.expr_checker.visit_type_var_expr(e)
 
     def visit_newtype_expr(self, e: NewTypeExpr) -> Type:
-        return AnyType()
+        return self.expr_checker.visit_newtype_expr(e)
 
     def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
-        # TODO: Perhaps return a type object type?
-        return AnyType()
+        return self.expr_checker.visit_namedtuple_expr(e)
 
     def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
-        # TODO: Perhaps return a type object type?
-        return AnyType()
+        return self.expr_checker.visit_typeddict_expr(e)
 
     def visit_list_expr(self, e: ListExpr) -> Type:
         return self.expr_checker.visit_list_expr(e)
@@ -2229,7 +2278,7 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_dictionary_comprehension(e)
 
     def visit_temp_node(self, e: TempNode) -> Type:
-        return e.type
+        return self.expr_checker.visit_temp_node(e)
 
     def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
         return self.expr_checker.visit_conditional_expr(e)
@@ -2238,27 +2287,16 @@ class TypeChecker(NodeVisitor[Type]):
         return self.expr_checker.visit_backquote_expr(e)
 
     def visit_yield_expr(self, e: YieldExpr) -> Type:
-        return_type = self.return_types[-1]
-        expected_item_type = self.get_generator_yield_type(return_type, False)
-        if e.expr is None:
-            if (not isinstance(expected_item_type, (Void, NoneTyp, AnyType))
-                    and self.in_checked_function()):
-                self.fail(messages.YIELD_VALUE_EXPECTED, e)
-        else:
-            actual_item_type = self.accept(e.expr, expected_item_type)
-            self.check_subtype(actual_item_type, expected_item_type, e,
-                            messages.INCOMPATIBLE_TYPES_IN_YIELD,
-                            'actual type', 'expected type')
-        return self.get_generator_receive_type(return_type, False)
+        return self.expr_checker.visit_yield_expr(e)
 
     def visit_await_expr(self, e: AwaitExpr) -> Type:
-        expected_type = self.type_context[-1]
-        if expected_type is not None:
-            expected_type = self.named_generic_type('typing.Awaitable', [expected_type])
-        actual_type = self.accept(e.expr, expected_type)
-        if isinstance(actual_type, AnyType):
-            return AnyType()
-        return self.check_awaitable_expr(actual_type, e, messages.INCOMPATIBLE_TYPES_IN_AWAIT)
+        return self.expr_checker.visit_await_expr(e)
+
+    def visit__promote_expr(self, e: PromoteExpr) -> Type:
+        return self.expr_checker.visit__promote_expr(e)
+
+    def visit_star_expr(self, e: StarExpr) -> Type:
+        return self.expr_checker.visit_star_expr(e)
 
     #
     # Helpers
@@ -2439,6 +2477,10 @@ class TypeChecker(NodeVisitor[Type]):
         """Produce an error message."""
         self.msg.fail(msg, context)
 
+    def warn(self, msg: str, context: Context) -> None:
+        """Produce a warning message."""
+        self.msg.warn(msg, context)
+
     def iterable_item_type(self, instance: Instance) -> Type:
         iterable = map_instance_to_supertype(
             instance,
@@ -2834,9 +2876,10 @@ def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
     """Does t have wider arguments than s?"""
     # TODO should an overload with additional items be allowed to be more
     #      general than one with fewer items (or just one item)?
-    # TODO check argument kinds
+    # TODO check argument kinds and otherwise make more general
     if isinstance(t, CallableType):
         if isinstance(s, CallableType):
+            t, s = unify_generic_callables(t, s)
             return all(is_proper_subtype(args, argt)
                        for argt, args in zip(t.arg_types, s.arg_types))
     elif isinstance(t, FunctionLike):
@@ -2847,6 +2890,43 @@ def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
     return False
 
 
+def unify_generic_callables(t: CallableType,
+                            s: CallableType) -> Tuple[CallableType,
+                                                      CallableType]:
+    """Make type variables in generic callables the same if possible.
+
+    Return updated callables. If we can't unify the type variables,
+    return the unmodified arguments.
+    """
+    # TODO: Use this elsewhere when comparing generic callables.
+    if t.is_generic() and s.is_generic():
+        t_substitutions = {}
+        s_substitutions = {}
+        for tv1, tv2 in zip(t.variables, s.variables):
+            # Are these something we can unify?
+            if tv1.id != tv2.id and is_equivalent_type_var_def(tv1, tv2):
+                newdef = TypeVarDef.new_unification_variable(tv2)
+                t_substitutions[tv1.id] = TypeVarType(newdef)
+                s_substitutions[tv2.id] = TypeVarType(newdef)
+        return (cast(CallableType, expand_type(t, t_substitutions)),
+                cast(CallableType, expand_type(s, s_substitutions)))
+    return t, s
+
+
+def is_equivalent_type_var_def(tv1: TypeVarDef, tv2: TypeVarDef) -> bool:
+    """Are type variable definitions equivalent?
+
+    Ignore ids, locations in source file and names.
+    """
+    return (
+        tv1.variance == tv2.variance
+        and is_same_types(tv1.values, tv2.values)
+        and ((tv1.upper_bound is None and tv2.upper_bound is None)
+             or (tv1.upper_bound is not None
+                 and tv2.upper_bound is not None
+                 and is_same_type(tv1.upper_bound, tv2.upper_bound))))
+
+
 def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool:
     # TODO check argument kinds
     return all(is_same_type(argt, args)
@@ -2925,6 +3005,18 @@ def is_valid_inferred_type_component(typ: Type) -> bool:
     return True
 
 
+def is_node_static(node: Node) -> Optional[bool]:
+    """Find out if a node describes a static function or method."""
+
+    if isinstance(node, FuncDef):
+        return node.is_static
+
+    if isinstance(node, Var):
+        return node.is_staticmethod
+
+    return None
+
+
 class Scope:
     # We keep two stacks combined, to maintain the relative order
     stack = None  # type: List[Union[Type, FuncItem, MypyFile]]
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index d465ae4..79454d0 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -9,7 +9,7 @@ from mypy.types import (
     PartialType, DeletedType, UnboundType, UninhabitedType, TypeType,
     true_only, false_only, is_named_instance, function_type, callable_type, FunctionLike,
     get_typ_args, set_typ_args,
-)
+    StarType)
 from mypy.nodes import (
     NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr,
     MemberExpr, IntExpr, StrExpr, BytesExpr, UnicodeExpr, FloatExpr,
@@ -17,7 +17,8 @@ from mypy.nodes import (
     TupleExpr, DictExpr, FuncExpr, SuperExpr, SliceExpr, Context, Expression,
     ListComprehension, GeneratorExpr, SetExpr, MypyFile, Decorator,
     ConditionalExpr, ComparisonExpr, TempNode, SetComprehension,
-    DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr,
+    DictionaryComprehension, ComplexExpr, EllipsisExpr, StarExpr, AwaitExpr, YieldExpr,
+    YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr,
     TypeAliasExpr, BackquoteExpr, ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, MODULE_REF,
     UNBOUND_TVAR, BOUND_TVAR,
 )
@@ -37,9 +38,10 @@ from mypy import erasetype
 from mypy.checkmember import analyze_member_access, type_object_type, bind_self
 from mypy.constraints import get_actual_type
 from mypy.checkstrformat import StringFormatterChecker
-from mypy.expandtype import expand_type, expand_type_by_instance
+from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars
 from mypy.util import split_module_names
-from mypy.semanal import fill_typevars
+from mypy.typevars import fill_typevars
+from mypy.visitor import ExpressionVisitor
 
 from mypy import experiments
 
@@ -85,7 +87,7 @@ class Finished(Exception):
     """Raised if we can terminate overload argument check early (no match)."""
 
 
-class ExpressionChecker:
+class ExpressionChecker(ExpressionVisitor[Type]):
     """Expression type checker.
 
     This class works closely together with checker.TypeChecker.
@@ -353,7 +355,7 @@ class ExpressionChecker:
                 lambda i: self.accept(args[i]))
 
             if callee.is_generic():
-                callee = freshen_generic_callable(callee)
+                callee = freshen_function_type_vars(callee)
                 callee = self.infer_function_type_arguments_using_context(
                     callee, context)
                 callee = self.infer_function_type_arguments(
@@ -1513,7 +1515,7 @@ class ExpressionChecker:
 
     def visit_reveal_type_expr(self, expr: RevealTypeExpr) -> Type:
         """Type check a reveal_type expression."""
-        revealed_type = self.accept(expr.expr)
+        revealed_type = self.accept(expr.expr, context=self.chk.type_context[-1])
         if not self.chk.current_node_deferred:
             self.msg.reveal_type(revealed_type, expr)
         return revealed_type
@@ -1916,7 +1918,7 @@ class ExpressionChecker:
         for index, sequence, conditions in zip(e.indices, e.sequences,
                                                e.condlists):
             sequence_type = self.chk.analyze_iterable_item_type(sequence)
-            self.chk.analyze_index_variables(index, sequence_type, e)
+            self.chk.analyze_index_variables(index, sequence_type, True, e)
             for condition in conditions:
                 self.accept(condition)
 
@@ -1930,6 +1932,11 @@ class ExpressionChecker:
     def visit_conditional_expr(self, e: ConditionalExpr) -> Type:
         cond_type = self.accept(e.cond)
         self.check_usable_type(cond_type, e)
+        if self.chk.options.strict_boolean:
+            is_bool = (isinstance(cond_type, Instance)
+                and cond_type.type.fullname() == 'builtins.bool')
+            if not (is_bool or isinstance(cond_type, AnyType)):
+                self.chk.fail(messages.NON_BOOLEAN_IN_CONDITIONAL, e)
         ctx = self.chk.type_context[-1]
 
         # Gain type information from isinstance if it is there
@@ -2040,6 +2047,151 @@ class ExpressionChecker:
         """
         self.chk.handle_cannot_determine_type(name, context)
 
+    def visit_yield_expr(self, e: YieldExpr) -> Type:
+        return_type = self.chk.return_types[-1]
+        expected_item_type = self.chk.get_generator_yield_type(return_type, False)
+        if e.expr is None:
+            if (not isinstance(expected_item_type, (Void, NoneTyp, AnyType))
+                    and self.chk.in_checked_function()):
+                self.chk.fail(messages.YIELD_VALUE_EXPECTED, e)
+        else:
+            actual_item_type = self.accept(e.expr, expected_item_type)
+            self.chk.check_subtype(actual_item_type, expected_item_type, e,
+                                   messages.INCOMPATIBLE_TYPES_IN_YIELD,
+                                   'actual type', 'expected type')
+        return self.chk.get_generator_receive_type(return_type, False)
+
+    def visit_await_expr(self, e: AwaitExpr) -> Type:
+        expected_type = self.chk.type_context[-1]
+        if expected_type is not None:
+            expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type])
+        actual_type = self.accept(e.expr, expected_type)
+        if isinstance(actual_type, AnyType):
+            return AnyType()
+        return self.check_awaitable_expr(actual_type, e, messages.INCOMPATIBLE_TYPES_IN_AWAIT)
+
+    def check_awaitable_expr(self, t: Type, ctx: Context, msg: str) -> Type:
+        """Check the argument to `await` and extract the type of value.
+
+        Also used by `async for` and `async with`.
+        """
+        if not self.chk.check_subtype(t, self.named_type('typing.Awaitable'), ctx,
+                                      msg, 'actual type', 'expected type'):
+            return AnyType()
+        else:
+            method = self.analyze_external_member_access('__await__', t, ctx)
+            generator = self.check_call(method, [], [], ctx)[0]
+            return self.chk.get_generator_return_type(generator, False)
+
+    def visit_yield_from_expr(self, e: YieldFromExpr) -> Type:
+        # NOTE: Whether `yield from` accepts an `async def` decorated
+        # with `@types.coroutine` (or `@asyncio.coroutine`) depends on
+        # whether the generator containing the `yield from` is itself
+        # thus decorated.  But it accepts a generator regardless of
+        # how it's decorated.
+        return_type = self.chk.return_types[-1]
+        subexpr_type = self.accept(e.expr, return_type)
+        iter_type = None  # type: Type
+
+        # Check that the expr is an instance of Iterable and get the type of the iterator produced
+        # by __iter__.
+        if isinstance(subexpr_type, AnyType):
+            iter_type = AnyType()
+        elif (isinstance(subexpr_type, Instance) and
+                is_subtype(subexpr_type, self.chk.named_type('typing.Iterable'))):
+            if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type):
+                self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
+            iter_method_type = self.analyze_external_member_access(
+                '__iter__',
+                subexpr_type,
+                AnyType())
+
+            generic_generator_type = self.chk.named_generic_type('typing.Generator',
+                                                                 [AnyType(), AnyType(), AnyType()])
+            iter_type, _ = self.check_call(iter_method_type, [], [],
+                                           context=generic_generator_type)
+        else:
+            if not (is_async_def(subexpr_type) and has_coroutine_decorator(return_type)):
+                self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e)
+                iter_type = AnyType()
+            else:
+                iter_type = self.check_awaitable_expr(subexpr_type, e,
+                                                      messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM)
+
+        # Check that the iterator's item type matches the type yielded by the Generator function
+        # containing this `yield from` expression.
+        expected_item_type = self.chk.get_generator_yield_type(return_type, False)
+        actual_item_type = self.chk.get_generator_yield_type(iter_type, False)
+
+        self.chk.check_subtype(actual_item_type, expected_item_type, e,
+                           messages.INCOMPATIBLE_TYPES_IN_YIELD_FROM,
+                           'actual type', 'expected type')
+
+        # Determine the type of the entire yield from expression.
+        if (isinstance(iter_type, Instance) and
+                iter_type.type.fullname() == 'typing.Generator'):
+            return self.chk.get_generator_return_type(iter_type, False)
+        else:
+            # Non-Generators don't return anything from `yield from` expressions.
+            # However special-case Any (which might be produced by an error).
+            if isinstance(actual_item_type, AnyType):
+                return AnyType()
+            else:
+                if experiments.STRICT_OPTIONAL:
+                    return NoneTyp(is_ret_type=True)
+                else:
+                    return Void()
+
+    def visit_temp_node(self, e: TempNode) -> Type:
+        return e.type
+
+    def visit_type_var_expr(self, e: TypeVarExpr) -> Type:
+        # TODO: Perhaps return a special type used for type variables only?
+        return AnyType()
+
+    def visit_newtype_expr(self, e: NewTypeExpr) -> Type:
+        return AnyType()
+
+    def visit_namedtuple_expr(self, e: NamedTupleExpr) -> Type:
+        # TODO: Perhaps return a type object type?
+        return AnyType()
+
+    def visit_typeddict_expr(self, e: TypedDictExpr) -> Type:
+        # TODO: Perhaps return a type object type?
+        return AnyType()
+
+    def visit__promote_expr(self, e: PromoteExpr) -> Type:
+        return e.type
+
+    def visit_star_expr(self, e: StarExpr) -> StarType:
+        return StarType(self.accept(e.expr))
+
+
+def has_coroutine_decorator(t: Type) -> bool:
+    """Whether t came from a function decorated with `@coroutine`."""
+    return isinstance(t, Instance) and t.type.fullname() == 'typing.AwaitableGenerator'
+
+
+def is_async_def(t: Type) -> bool:
+    """Whether t came from a function defined using `async def`."""
+    # In check_func_def(), when we see a function decorated with
+    # `@typing.coroutine` or `@asyncio.coroutine`, we change the
+    # return type to typing.AwaitableGenerator[...], so that its
+    # type is compatible with either Generator or Awaitable.
+    # But for the check here we need to know whether the original
+    # function (before decoration) was an `async def`.  The
+    # AwaitableGenerator type conveniently preserves the original
+    # type as its 4th parameter (3rd when using 0-origin indexing
+    # :-), so that we can recover that information here.
+    # (We really need to see whether the original, undecorated
+    # function was an `async def`, which is orthogonal to its
+    # decorations.)
+    if (isinstance(t, Instance)
+            and t.type.fullname() == 'typing.AwaitableGenerator'
+            and len(t.args) >= 4):
+        t = t.args[3]
+    return isinstance(t, Instance) and t.type.fullname() == 'typing.Awaitable'
+
 
 def map_actuals_to_formals(caller_kinds: List[int],
                            caller_names: List[str],
@@ -2245,14 +2397,3 @@ def overload_arg_similarity(actual: Type, formal: Type) -> int:
         return 2
     # Fall back to a conservative equality check for the remaining kinds of type.
     return 2 if is_same_type(erasetype.erase_type(actual), erasetype.erase_type(formal)) else 0
-
-
-def freshen_generic_callable(callee: CallableType) -> CallableType:
-    tvdefs = []
-    tvmap = {}  # type: Dict[TypeVarId, Type]
-    for v in callee.variables:
-        tvdef = TypeVarDef.new_unification_variable(v)
-        tvdefs.append(tvdef)
-        tvmap[v.id] = TypeVarType(tvdef)
-
-    return cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvdefs)
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 31bd699..456ad6b 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -14,9 +14,9 @@ from mypy.nodes import (
 )
 from mypy.messages import MessageBuilder
 from mypy.maptype import map_instance_to_supertype
-from mypy.expandtype import expand_type_by_instance, expand_type
+from mypy.expandtype import expand_type_by_instance, expand_type, freshen_function_type_vars
 from mypy.infer import infer_type_arguments
-from mypy.semanal import fill_typevars
+from mypy.typevars import fill_typevars
 from mypy import messages
 from mypy import subtypes
 MYPY = False
@@ -80,6 +80,7 @@ def analyze_member_access(name: str,
             if is_lvalue:
                 msg.cant_assign_to_method(node)
             signature = function_type(method, builtin_type('builtins.function'))
+            signature = freshen_function_type_vars(signature)
             if name == '__new__':
                 # __new__ is special and behaves like a static method -- don't strip
                 # the first argument.
@@ -218,16 +219,20 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                            original_type, not_ready_callback)
     elif isinstance(v, FuncDef):
         assert False, "Did not expect a function"
-    elif not v and name not in ['__getattr__', '__setattr__']:
+    elif not v and name not in ['__getattr__', '__setattr__', '__getattribute__']:
         if not is_lvalue:
-            method = info.get_method('__getattr__')
-            if method:
-                function = function_type(method, builtin_type('builtins.function'))
-                bound_method = bind_self(function, original_type)
-                typ = map_instance_to_supertype(itype, method.info)
-                getattr_type = expand_type_by_instance(bound_method, typ)
-                if isinstance(getattr_type, CallableType):
-                    return getattr_type.ret_type
+            for method_name in ('__getattribute__', '__getattr__'):
+                method = info.get_method(method_name)
+                # __getattribute__ is defined on builtins.object and returns Any, so without
+                # the guard this search will always find object.__getattribute__ and conclude
+                # that the attribute exists
+                if method and method.info.fullname() != 'builtins.object':
+                    function = function_type(method, builtin_type('builtins.function'))
+                    bound_method = bind_self(function, original_type)
+                    typ = map_instance_to_supertype(itype, method.info)
+                    getattr_type = expand_type_by_instance(bound_method, typ)
+                    if isinstance(getattr_type, CallableType):
+                        return getattr_type.ret_type
 
     if itype.type.fallback_to_any:
         return AnyType()
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 1d1e1c3..533a436 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -2,10 +2,12 @@
 
 from typing import Iterable, List, Optional
 
+from mypy import experiments
 from mypy.types import (
     CallableType, Type, TypeVisitor, UnboundType, AnyType, Void, NoneTyp, TypeVarType,
     Instance, TupleType, TypedDictType, UnionType, Overloaded, ErasedType, PartialType,
-    DeletedType, UninhabitedType, TypeType, TypeVarId, is_named_instance
+    DeletedType, UninhabitedType, TypeType, TypeVarId, TypeQuery, ALL_TYPES_STRATEGY,
+    is_named_instance
 )
 from mypy.maptype import map_instance_to_supertype
 from mypy import nodes
@@ -149,13 +151,23 @@ def infer_constraints(template: Type, actual: Type,
     # be a supertype of the potential subtype, some item of the Union
     # must be a supertype of it.
     if direction == SUBTYPE_OF and isinstance(actual, UnionType):
+        # If some of items is not a complete type, disregard that.
+        items = simplify_away_incomplete_types(actual.items)
+        # We infer constraints eagerly -- try to find constraints for a type
+        # variable if possible. This seems to help with some real-world
+        # use cases.
         return any_constraints(
             [infer_constraints_if_possible(template, a_item, direction)
-             for a_item in actual.items])
+             for a_item in items],
+            eager=True)
     if direction == SUPERTYPE_OF and isinstance(template, UnionType):
+        # When the template is a union, we are okay with leaving some
+        # type variables indeterminate. This helps with some special
+        # cases, though this isn't very principled.
         return any_constraints(
             [infer_constraints_if_possible(t_item, actual, direction)
-             for t_item in template.items])
+             for t_item in template.items],
+            eager=False)
 
     # Remaining cases are handled by ConstraintBuilderVisitor.
     return template.accept(ConstraintBuilderVisitor(actual, direction))
@@ -177,12 +189,18 @@ def infer_constraints_if_possible(template: Type, actual: Type,
     return infer_constraints(template, actual, direction)
 
 
-def any_constraints(options: List[Optional[List[Constraint]]]) -> List[Constraint]:
-    """Deduce what we can from a collection of constraint lists given that
-    at least one of the lists must be satisfied. A None element in the
-    list of options represents an unsatisfiable constraint and is ignored.
+def any_constraints(options: List[Optional[List[Constraint]]], eager: bool) -> List[Constraint]:
+    """Deduce what we can from a collection of constraint lists.
+
+    It's a given that at least one of the lists must be satisfied. A
+    None element in the list of options represents an unsatisfiable
+    constraint and is ignored.  Ignore empty constraint lists if eager
+    is true -- they are always trivially satisfiable.
     """
-    valid_options = [option for option in options if option is not None]
+    if eager:
+        valid_options = [option for option in options if option]
+    else:
+        valid_options = [option for option in options if option is not None]
     if len(valid_options) == 1:
         return valid_options[0]
     # Otherwise, there are either no valid options or multiple valid options.
@@ -196,6 +214,34 @@ def any_constraints(options: List[Optional[List[Constraint]]]) -> List[Constrain
     # every option, combine the bounds with meet/join.
 
 
+def simplify_away_incomplete_types(types: List[Type]) -> List[Type]:
+    complete = [typ for typ in types if is_complete_type(typ)]
+    if complete:
+        return complete
+    else:
+        return types
+
+
+def is_complete_type(typ: Type) -> bool:
+    """Is a type complete?
+
+    A complete doesn't have uninhabited type components or (when not in strict
+    optional mode) None components.
+    """
+    return typ.accept(CompleteTypeVisitor())
+
+
+class CompleteTypeVisitor(TypeQuery):
+    def __init__(self) -> None:
+        super().__init__(default=True, strategy=ALL_TYPES_STRATEGY)
+
+    def visit_none_type(self, t: NoneTyp) -> bool:
+        return experiments.STRICT_OPTIONAL
+
+    def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+        return False
+
+
 class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
     """Visitor class for inferring type constraints."""
 
diff --git a/mypy/errors.py b/mypy/errors.py
index 702031d..acbd3d1 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -33,7 +33,7 @@ class ErrorInfo:
     # The column number related to this error with file.
     column = 0   # -1 if unknown
 
-    # Either 'error' or 'note'.
+    # Either 'error', 'note', or 'warning'.
     severity = ''
 
     # The error message.
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 785c77d..e90a89f 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -1,13 +1,14 @@
-from typing import Dict, Iterable, List
+from typing import Dict, Iterable, List, TypeVar, Mapping, cast
 
 from mypy.types import (
     Type, Instance, CallableType, TypeVisitor, UnboundType, ErrorType, AnyType,
     Void, NoneTyp, TypeVarType, Overloaded, TupleType, TypedDictType, UnionType,
-    ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId
+    ErasedType, TypeList, PartialType, DeletedType, UninhabitedType, TypeType, TypeVarId,
+    FunctionLike, TypeVarDef
 )
 
 
-def expand_type(typ: Type, env: Dict[TypeVarId, Type]) -> Type:
+def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type:
     """Substitute any type variable references in a type given by a type
     environment.
     """
@@ -28,12 +29,35 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
         return expand_type(typ, variables)
 
 
+F = TypeVar('F', bound=FunctionLike)
+
+
+def freshen_function_type_vars(callee: F) -> F:
+    """Substitute fresh type variables for generic function type variables."""
+    if isinstance(callee, CallableType):
+        if not callee.is_generic():
+            return cast(F, callee)
+        tvdefs = []
+        tvmap = {}  # type: Dict[TypeVarId, Type]
+        for v in callee.variables:
+            tvdef = TypeVarDef.new_unification_variable(v)
+            tvdefs.append(tvdef)
+            tvmap[v.id] = TypeVarType(tvdef)
+        fresh = cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvdefs)
+        return cast(F, fresh)
+    else:
+        assert isinstance(callee, Overloaded)
+        fresh_overload = Overloaded([freshen_function_type_vars(item)
+                                     for item in callee.items()])
+        return cast(F, fresh_overload)
+
+
 class ExpandTypeVisitor(TypeVisitor[Type]):
     """Visitor that substitutes type variables with values."""
 
-    variables = None  # type: Dict[TypeVarId, Type]  # TypeVar id -> TypeVar value
+    variables = None  # type: Mapping[TypeVarId, Type]  # TypeVar id -> TypeVar value
 
-    def __init__(self, variables: Dict[TypeVarId, Type]) -> None:
+    def __init__(self, variables: Mapping[TypeVarId, Type]) -> None:
         self.variables = variables
 
     def visit_unbound_type(self, t: UnboundType) -> Type:
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index b882124..1d8fe4b 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -1,7 +1,7 @@
 from functools import wraps
 import sys
 
-from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List
+from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set
 from mypy.sharedparse import special_function_elide_names, argument_elide_name
 from mypy.nodes import (
     MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
@@ -23,6 +23,7 @@ from mypy.types import (
 )
 from mypy import defaults
 from mypy import experiments
+from mypy import messages
 from mypy.errors import Errors
 
 try:
@@ -56,6 +57,11 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
 
     The pyversion (major, minor) argument determines the Python syntax variant.
     """
+    raise_on_error = False
+    if errors is None:
+        errors = Errors()
+        raise_on_error = True
+    errors.set_file('<input>' if fnam is None else fnam)
     is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     try:
         assert pyversion[0] >= 3 or is_stub_file
@@ -63,29 +69,30 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
 
         tree = ASTConverter(pyversion=pyversion,
                             is_stub=is_stub_file,
+                            errors=errors,
                             custom_typing_module=custom_typing_module,
                             ).visit(ast)
         tree.path = fnam
         tree.is_stub = is_stub_file
-        return tree
-    except (SyntaxError, TypeCommentParseError) as e:
-        if errors:
-            errors.set_file('<input>' if fnam is None else fnam)
-            errors.report(e.lineno, e.offset, e.msg)
-        else:
-            raise
+    except SyntaxError as e:
+        errors.report(e.lineno, e.offset, e.msg)
+        tree = MypyFile([], [], False, set())
 
-    return MypyFile([], [], False, set())
+    if raise_on_error and errors.is_errors():
+        errors.raise_error()
 
+    return tree
 
-def parse_type_comment(type_comment: str, line: int) -> Type:
+
+def parse_type_comment(type_comment: str, line: int, errors: Errors) -> Optional[Type]:
     try:
         typ = ast35.parse(type_comment, '<type_comment>', 'eval')
     except SyntaxError as e:
-        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line, e.offset)
+        errors.report(line, e.offset, TYPE_COMMENT_SYNTAX_ERROR)
+        return None
     else:
         assert isinstance(typ, ast35.Expression)
-        return TypeConverter(line=line).visit(typ.body)
+        return TypeConverter(errors, line=line).visit(typ.body)
 
 
 def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter', T], U]:
@@ -104,18 +111,32 @@ def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
     return None
 
 
+def is_no_type_check_decorator(expr: ast35.expr) -> bool:
+    if isinstance(expr, ast35.Name):
+        return expr.id == 'no_type_check'
+    elif isinstance(expr, ast35.Attribute):
+        if isinstance(expr.value, ast35.Name):
+            return expr.value.id == 'typing' and expr.attr == 'no_type_check'
+    return False
+
+
 class ASTConverter(ast35.NodeTransformer):
     def __init__(self,
                  pyversion: Tuple[int, int],
                  is_stub: bool,
+                 errors: Errors,
                  custom_typing_module: str = None) -> None:
         self.class_nesting = 0
         self.imports = []  # type: List[ImportBase]
 
         self.pyversion = pyversion
         self.is_stub = is_stub
+        self.errors = errors
         self.custom_typing_module = custom_typing_module
 
+    def fail(self, msg: str, line: int, column: int) -> None:
+        self.errors.report(line, column, msg)
+
     def generic_visit(self, node: ast35.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
 
@@ -259,7 +280,10 @@ class ASTConverter(ast35.NodeTransformer):
     def do_func_def(self, n: Union[ast35.FunctionDef, ast35.AsyncFunctionDef],
                     is_coroutine: bool = False) -> Union[FuncDef, Decorator]:
         """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef."""
-        args = self.transform_args(n.args, n.lineno)
+        no_type_check = bool(n.decorator_list and
+                             any(is_no_type_check_decorator(d) for d in n.decorator_list))
+
+        args = self.transform_args(n.args, n.lineno, no_type_check=no_type_check)
 
         arg_kinds = [arg.kind for arg in args]
         arg_names = [arg.variable.name() for arg in args]  # type: List[Optional[str]]
@@ -267,30 +291,42 @@ class ASTConverter(ast35.NodeTransformer):
         if special_function_elide_names(n.name):
             arg_names = [None] * len(arg_names)
         arg_types = None  # type: List[Type]
-        if n.type_comment is not None:
+        if no_type_check:
+            arg_types = [None] * len(args)
+            return_type = None
+        elif n.type_comment is not None:
             try:
                 func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
+                assert isinstance(func_type_ast, ast35.FunctionType)
+                # for ellipsis arg
+                if (len(func_type_ast.argtypes) == 1 and
+                        isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
+                    if n.returns:
+                        # PEP 484 disallows both type annotations and type comments
+                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
+                    arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
+                                 for a in args]
+                else:
+                    # PEP 484 disallows both type annotations and type comments
+                    if n.returns or any(a.type_annotation is not None for a in args):
+                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
+                    translated_args = (TypeConverter(self.errors, line=n.lineno)
+                                       .translate_expr_list(func_type_ast.argtypes))
+                    arg_types = [a if a is not None else AnyType()
+                                for a in translated_args]
+                return_type = TypeConverter(self.errors,
+                                            line=n.lineno).visit(func_type_ast.returns)
+
+                # add implicit self type
+                if self.in_class() and len(arg_types) < len(args):
+                    arg_types.insert(0, AnyType())
             except SyntaxError:
-                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
-            assert isinstance(func_type_ast, ast35.FunctionType)
-            # for ellipsis arg
-            if (len(func_type_ast.argtypes) == 1 and
-                    isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
-                arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
-                             for a in args]
-            else:
-                translated_args = (TypeConverter(line=n.lineno)
-                                   .translate_expr_list(func_type_ast.argtypes))
-                arg_types = [a if a is not None else AnyType()
-                             for a in translated_args]
-            return_type = TypeConverter(line=n.lineno).visit(func_type_ast.returns)
-
-            # add implicit self type
-            if self.in_class() and len(arg_types) < len(args):
-                arg_types.insert(0, AnyType())
+                self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
+                arg_types = [AnyType()] * len(args)
+                return_type = AnyType()
         else:
             arg_types = [a.type_annotation for a in args]
-            return_type = TypeConverter(line=n.lineno).visit(n.returns)
+            return_type = TypeConverter(self.errors, line=n.lineno).visit(n.returns)
 
         for arg, arg_type in zip(args, arg_types):
             self.set_type_optional(arg_type, arg.initializer)
@@ -300,17 +336,21 @@ class ASTConverter(ast35.NodeTransformer):
 
         func_type = None
         if any(arg_types) or return_type:
-            if len(arg_types) > len(arg_kinds):
-                raise FastParserError('Type signature has too many arguments', n.lineno, offset=0)
-            if len(arg_types) < len(arg_kinds):
-                raise FastParserError('Type signature has too few arguments', n.lineno, offset=0)
-            func_type = CallableType([a if a is not None else
-                                      AnyType(implicit=True) for a in arg_types],
-                                     arg_kinds,
-                                     arg_names,
-                                     return_type if return_type is not None else
-                                     AnyType(implicit=True),
-                                     None)
+            if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
+                self.fail("Ellipses cannot accompany other argument types "
+                          "in function type signature.", n.lineno, 0)
+            elif len(arg_types) > len(arg_kinds):
+                self.fail('Type signature has too many arguments', n.lineno, 0)
+            elif len(arg_types) < len(arg_kinds):
+                self.fail('Type signature has too few arguments', n.lineno, 0)
+            else:
+                func_type = CallableType([a if a is not None else
+                                          AnyType(implicit=True) for a in arg_types],
+                                         arg_kinds,
+                                         arg_names,
+                                         return_type if return_type is not None else
+                                         AnyType(implicit=True),
+                                         None)
 
         func_def = FuncDef(n.name,
                        args,
@@ -343,24 +383,35 @@ class ASTConverter(ast35.NodeTransformer):
         if isinstance(type, UnboundType):
             type.optional = optional
 
-    def transform_args(self, args: ast35.arguments, line: int) -> List[Argument]:
+    def transform_args(self,
+                       args: ast35.arguments,
+                       line: int,
+                       no_type_check: bool = False,
+                       ) -> List[Argument]:
         def make_argument(arg: ast35.arg, default: Optional[ast35.expr], kind: int) -> Argument:
-            arg_type = TypeConverter(line=line).visit(arg.annotation)
+            if no_type_check:
+                arg_type = None
+            else:
+                arg_type = TypeConverter(self.errors, line=line).visit(arg.annotation)
             return Argument(Var(arg.arg), arg_type, self.visit(default), kind)
 
         new_args = []
+        names = []  # type: List[ast35.arg]
         num_no_defaults = len(args.args) - len(args.defaults)
         # positional arguments without defaults
         for a in args.args[:num_no_defaults]:
             new_args.append(make_argument(a, None, ARG_POS))
+            names.append(a)
 
         # positional arguments with defaults
         for a, d in zip(args.args[num_no_defaults:], args.defaults):
             new_args.append(make_argument(a, d, ARG_OPT))
+            names.append(a)
 
         # *arg
         if args.vararg is not None:
             new_args.append(make_argument(args.vararg, None, ARG_STAR))
+            names.append(args.vararg)
 
         # keyword-only arguments with defaults
         for a, d in zip(args.kwonlyargs, args.kw_defaults):
@@ -368,10 +419,20 @@ class ASTConverter(ast35.NodeTransformer):
                 a,
                 d,
                 ARG_NAMED if d is None else ARG_NAMED_OPT))
+            names.append(a)
 
         # **kwarg
         if args.kwarg is not None:
             new_args.append(make_argument(args.kwarg, None, ARG_STAR2))
+            names.append(args.kwarg)
+
+        seen_names = set()  # type: Set[str]
+        for name in names:
+            if name.arg in seen_names:
+                self.fail("duplicate argument '{}' in function definition".format(name.arg),
+                          name.lineno, name.col_offset)
+                break
+            seen_names.add(name.arg)
 
         return new_args
 
@@ -432,14 +493,13 @@ class ASTConverter(ast35.NodeTransformer):
         else:
             new_syntax = False
         if new_syntax and self.pyversion < (3, 6):
-            raise TypeCommentParseError('Variable annotation syntax is only '
-                                        'suppoted in Python 3.6, use type '
-                                        'comment instead', n.lineno, n.col_offset)
+            self.fail('Variable annotation syntax is only supported in Python 3.6, '
+                      'use type comment instead', n.lineno, n.col_offset)
         # typed_ast prevents having both type_comment and annotation.
         if n.type_comment is not None:
-            typ = parse_type_comment(n.type_comment, n.lineno)
+            typ = parse_type_comment(n.type_comment, n.lineno, self.errors)
         elif new_syntax:
-            typ = TypeConverter(line=n.lineno).visit(n.annotation)  # type: ignore
+            typ = TypeConverter(self.errors, line=n.lineno).visit(n.annotation)  # type: ignore
             typ.column = n.annotation.col_offset
         if n.value is None:  # always allow 'x: int'
             rvalue = TempNode(AnyType())  # type: Expression
@@ -460,10 +520,15 @@ class ASTConverter(ast35.NodeTransformer):
     # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
     @with_line
     def visit_For(self, n: ast35.For) -> ForStmt:
+        if n.type_comment is not None:
+            target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+        else:
+            target_type = None
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
                        self.as_block(n.body, n.lineno),
-                       self.as_block(n.orelse, n.lineno))
+                       self.as_block(n.orelse, n.lineno),
+                       target_type)
 
     # AsyncFor(expr target, expr iter, stmt* body, stmt* orelse)
     @with_line
@@ -492,9 +557,14 @@ class ASTConverter(ast35.NodeTransformer):
     # With(withitem* items, stmt* body, string? type_comment)
     @with_line
     def visit_With(self, n: ast35.With) -> WithStmt:
+        if n.type_comment is not None:
+            target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+        else:
+            target_type = None
         return WithStmt([self.visit(i.context_expr) for i in n.items],
                         [self.visit(i.optional_vars) for i in n.items],
-                        self.as_block(n.body, n.lineno))
+                        self.as_block(n.body, n.lineno),
+                        target_type)
 
     # AsyncWith(withitem* items, stmt* body)
     @with_line
@@ -527,7 +597,7 @@ class ASTConverter(ast35.NodeTransformer):
     # Assert(expr test, expr? msg)
     @with_line
     def visit_Assert(self, n: ast35.Assert) -> AssertStmt:
-        return AssertStmt(self.visit(n.test))
+        return AssertStmt(self.visit(n.test), self.visit(n.msg))
 
     # Import(alias* names)
     @with_line
@@ -740,14 +810,14 @@ class ASTConverter(ast35.NodeTransformer):
         return CallExpr(self.visit(n.func),
                         arg_types,
                         arg_kinds,
-                        cast("List[str]", [None for _ in n.args]) + [k.arg for k in n.keywords])
+                        cast("List[str]", [None] * len(n.args)) + [k.arg for k in n.keywords])
 
     # Num(object n) -- a number as a PyObject.
     @with_line
     def visit_Num(self, n: ast35.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]:
         if getattr(n, 'contains_underscores', None) and self.pyversion < (3, 6):
-            raise FastParserError('Underscores in numeric literals are only '
-                                  'supported in Python 3.6', n.lineno, n.col_offset)
+            self.fail('Underscores in numeric literals are only supported in Python 3.6',
+                      n.lineno, n.col_offset)
         if isinstance(n.n, int):
             return IntExpr(n.n)
         elif isinstance(n.n, float):
@@ -845,18 +915,22 @@ class ASTConverter(ast35.NodeTransformer):
 
 
 class TypeConverter(ast35.NodeTransformer):
-    def __init__(self, line: int = -1) -> None:
+    def __init__(self, errors: Errors, line: int = -1) -> None:
+        self.errors = errors
         self.line = line
 
+    def fail(self, msg: str, line: int, column: int) -> None:
+        self.errors.report(line, column, msg)
+
     def visit_raw_str(self, s: str) -> Type:
         # An escape hatch that allows the AST walker in fastparse2 to
         # directly hook into the Python 3.5 type converter in some cases
         # without needing to create an intermediary `ast35.Str` object.
-        return parse_type_comment(s.strip(), line=self.line)
+        return parse_type_comment(s.strip(), self.line, self.errors) or AnyType()
 
-    def generic_visit(self, node: ast35.AST) -> None:
-        raise TypeCommentParseError(TYPE_COMMENT_AST_ERROR, self.line,
-                                    getattr(node, 'col_offset', -1))
+    def generic_visit(self, node: ast35.AST) -> Type:  # type: ignore
+        self.fail(TYPE_COMMENT_AST_ERROR, self.line, getattr(node, 'col_offset', -1))
+        return AnyType()
 
     def visit_NoneType(self, n: Any) -> Type:
         return None
@@ -872,13 +946,13 @@ class TypeConverter(ast35.NodeTransformer):
 
     # Str(string s)
     def visit_Str(self, n: ast35.Str) -> Type:
-        return parse_type_comment(n.s.strip(), line=self.line)
+        return parse_type_comment(n.s.strip(), self.line, self.errors) or AnyType()
 
     # Subscript(expr value, slice slice, expr_context ctx)
     def visit_Subscript(self, n: ast35.Subscript) -> Type:
         if not isinstance(n.slice, ast35.Index):
-            raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, self.line,
-                                        getattr(n, 'col_offset', -1))
+            self.fail(TYPE_COMMENT_SYNTAX_ERROR, self.line, getattr(n, 'col_offset', -1))
+            return AnyType()
 
         value = self.visit(n.value)
 
@@ -914,14 +988,3 @@ class TypeConverter(ast35.NodeTransformer):
     # List(expr* elts, expr_context ctx)
     def visit_List(self, n: ast35.List) -> Type:
         return TypeList(self.translate_expr_list(n.elts), line=self.line)
-
-
-class TypeCommentParseError(Exception):
-    def __init__(self, msg: str, lineno: int, offset: int) -> None:
-        self.msg = msg
-        self.lineno = lineno
-        self.offset = offset
-
-
-class FastParserError(TypeCommentParseError):
-    pass
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index b1759db..2f693cb 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -17,7 +17,7 @@ two in a typesafe way.
 from functools import wraps
 import sys
 
-from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List
+from typing import Tuple, Union, TypeVar, Callable, Sequence, Optional, Any, cast, List, Set
 from mypy.sharedparse import special_function_elide_names, argument_elide_name
 from mypy.nodes import (
     MypyFile, Node, ImportBase, Import, ImportAll, ImportFrom, FuncDef, OverloadedFuncDef,
@@ -30,16 +30,17 @@ from mypy.nodes import (
     FloatExpr, CallExpr, SuperExpr, MemberExpr, IndexExpr, SliceExpr, OpExpr,
     UnaryExpr, FuncExpr, ComparisonExpr, DictionaryComprehension,
     SetComprehension, ComplexExpr, EllipsisExpr, YieldExpr, Argument,
-    Expression, Statement,
+    Expression, Statement, BackquoteExpr, PrintStmt, ExecStmt,
     ARG_POS, ARG_OPT, ARG_STAR, ARG_NAMED, ARG_STAR2
 )
 from mypy.types import (
-    Type, CallableType, AnyType, UnboundType,
+    Type, CallableType, AnyType, UnboundType, EllipsisType
 )
 from mypy import defaults
 from mypy import experiments
+from mypy import messages
 from mypy.errors import Errors
-from mypy.fastparse import TypeConverter, TypeCommentParseError
+from mypy.fastparse import TypeConverter, parse_type_comment
 
 try:
     from typed_ast import ast27
@@ -54,7 +55,6 @@ except ImportError:
         print('The typed_ast package required by --fast-parser is only compatible with'
               ' Python 3.3 and greater.')
     sys.exit(1)
-from mypy.fastparse import FastParserError
 
 T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
 U = TypeVar('U', bound=Node)
@@ -74,36 +74,31 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
 
     The pyversion (major, minor) argument determines the Python syntax variant.
     """
+    raise_on_error = False
+    if errors is None:
+        errors = Errors()
+        raise_on_error = True
+    errors.set_file('<input>' if fnam is None else fnam)
     is_stub_file = bool(fnam) and fnam.endswith('.pyi')
     try:
         assert pyversion[0] < 3 and not is_stub_file
         ast = ast27.parse(source, fnam, 'exec')
         tree = ASTConverter(pyversion=pyversion,
                             is_stub=is_stub_file,
+                            errors=errors,
                             custom_typing_module=custom_typing_module,
                             ).visit(ast)
         assert isinstance(tree, MypyFile)
         tree.path = fnam
         tree.is_stub = is_stub_file
-        return tree
-    except (SyntaxError, TypeCommentParseError) as e:
-        if errors:
-            errors.set_file('<input>' if fnam is None else fnam)
-            errors.report(e.lineno, e.offset, e.msg)
-        else:
-            raise
-
-    return MypyFile([], [], False, set())
+    except SyntaxError as e:
+        errors.report(e.lineno, e.offset, e.msg)
+        tree = MypyFile([], [], False, set())
 
+    if raise_on_error and errors.is_errors():
+        errors.raise_error()
 
-def parse_type_comment(type_comment: str, line: int) -> Type:
-    try:
-        typ = ast35.parse(type_comment, '<type_comment>', 'eval')
-    except SyntaxError as e:
-        raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, line, e.offset)
-    else:
-        assert isinstance(typ, ast35.Expression)
-        return TypeConverter(line=line).visit(typ.body)
+    return tree
 
 
 def with_line(f: Callable[['ASTConverter', T], U]) -> Callable[['ASTConverter', T], U]:
@@ -122,18 +117,32 @@ def find(f: Callable[[V], bool], seq: Sequence[V]) -> V:
     return None
 
 
+def is_no_type_check_decorator(expr: ast27.expr) -> bool:
+    if isinstance(expr, ast27.Name):
+        return expr.id == 'no_type_check'
+    elif isinstance(expr, ast27.Attribute):
+        if isinstance(expr.value, ast27.Name):
+            return expr.value.id == 'typing' and expr.attr == 'no_type_check'
+    return False
+
+
 class ASTConverter(ast27.NodeTransformer):
     def __init__(self,
                  pyversion: Tuple[int, int],
                  is_stub: bool,
+                 errors: Errors,
                  custom_typing_module: str = None) -> None:
         self.class_nesting = 0
         self.imports = []  # type: List[ImportBase]
 
         self.pyversion = pyversion
         self.is_stub = is_stub
+        self.errors = errors
         self.custom_typing_module = custom_typing_module
 
+    def fail(self, msg: str, line: int, column: int) -> None:
+        self.errors.report(line, column, msg)
+
     def generic_visit(self, node: ast27.AST) -> None:
         raise RuntimeError('AST node not implemented: ' + str(type(node)))
 
@@ -267,8 +276,8 @@ class ASTConverter(ast27.NodeTransformer):
     #              arg? kwarg, expr* defaults)
     @with_line
     def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement:
-        converter = TypeConverter(line=n.lineno)
-        args = self.transform_args(n.args, n.lineno)
+        converter = TypeConverter(self.errors, line=n.lineno)
+        args, decompose_stmts = self.transform_args(n.args, n.lineno)
 
         arg_kinds = [arg.kind for arg in args]
         arg_names = [arg.variable.name() for arg in args]  # type: List[Optional[str]]
@@ -277,25 +286,33 @@ class ASTConverter(ast27.NodeTransformer):
             arg_names = [None] * len(arg_names)
 
         arg_types = None  # type: List[Type]
-        if n.type_comment is not None and len(n.type_comment) > 0:
+        if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)):
+            arg_types = [None] * len(args)
+            return_type = None
+        elif n.type_comment is not None and len(n.type_comment) > 0:
             try:
                 func_type_ast = ast35.parse(n.type_comment, '<func_type>', 'func_type')
+                assert isinstance(func_type_ast, ast35.FunctionType)
+                # for ellipsis arg
+                if (len(func_type_ast.argtypes) == 1 and
+                        isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
+                    arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
+                                for a in args]
+                else:
+                    # PEP 484 disallows both type annotations and type comments
+                    if any(a.type_annotation is not None for a in args):
+                        self.fail(messages.DUPLICATE_TYPE_SIGNATURES, n.lineno, n.col_offset)
+                    arg_types = [a if a is not None else AnyType() for
+                                a in converter.translate_expr_list(func_type_ast.argtypes)]
+                return_type = converter.visit(func_type_ast.returns)
+
+                # add implicit self type
+                if self.in_class() and len(arg_types) < len(args):
+                    arg_types.insert(0, AnyType())
             except SyntaxError:
-                raise TypeCommentParseError(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
-            assert isinstance(func_type_ast, ast35.FunctionType)
-            # for ellipsis arg
-            if (len(func_type_ast.argtypes) == 1 and
-                    isinstance(func_type_ast.argtypes[0], ast35.Ellipsis)):
-                arg_types = [a.type_annotation if a.type_annotation is not None else AnyType()
-                             for a in args]
-            else:
-                arg_types = [a if a is not None else AnyType() for
-                            a in converter.translate_expr_list(func_type_ast.argtypes)]
-            return_type = converter.visit(func_type_ast.returns)
-
-            # add implicit self type
-            if self.in_class() and len(arg_types) < len(args):
-                arg_types.insert(0, AnyType())
+                self.fail(TYPE_COMMENT_SYNTAX_ERROR, n.lineno, n.col_offset)
+                arg_types = [AnyType()] * len(args)
+                return_type = AnyType()
         else:
             arg_types = [a.type_annotation for a in args]
             return_type = converter.visit(None)
@@ -308,19 +325,26 @@ class ASTConverter(ast27.NodeTransformer):
 
         func_type = None
         if any(arg_types) or return_type:
-            if len(arg_types) > len(arg_kinds):
-                raise FastParserError('Type signature has too many arguments', n.lineno, offset=0)
-            if len(arg_types) < len(arg_kinds):
-                raise FastParserError('Type signature has too few arguments', n.lineno, offset=0)
-            func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
-                                     arg_kinds,
-                                     arg_names,
-                                     return_type if return_type is not None else AnyType(),
-                                     None)
-
+            if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types):
+                self.fail("Ellipses cannot accompany other argument types "
+                          "in function type signature.", n.lineno, 0)
+            elif len(arg_types) > len(arg_kinds):
+                self.fail('Type signature has too many arguments', n.lineno, 0)
+            elif len(arg_types) < len(arg_kinds):
+                self.fail('Type signature has too few arguments', n.lineno, 0)
+            else:
+                func_type = CallableType([a if a is not None else AnyType() for a in arg_types],
+                                        arg_kinds,
+                                        arg_names,
+                                        return_type if return_type is not None else AnyType(),
+                                        None)
+
+        body = self.as_block(n.body, n.lineno)
+        if decompose_stmts:
+            body.body = decompose_stmts + body.body
         func_def = FuncDef(n.name,
                        args,
-                       self.as_block(n.body, n.lineno),
+                       body,
                        func_type)
         if func_type is not None:
             func_type.definition = func_def
@@ -346,23 +370,34 @@ class ASTConverter(ast27.NodeTransformer):
         if isinstance(type, UnboundType):
             type.optional = optional
 
-    def transform_args(self, n: ast27.arguments, line: int) -> List[Argument]:
+    def transform_args(self,
+                       n: ast27.arguments,
+                       line: int,
+                       ) -> Tuple[List[Argument], List[Statement]]:
         # TODO: remove the cast once https://github.com/python/typeshed/pull/522
         # is accepted and synced
         type_comments = cast(List[str], n.type_comments)  # type: ignore
-        converter = TypeConverter(line=line)
+        converter = TypeConverter(self.errors, line=line)
+        decompose_stmts = []  # type: List[Statement]
 
-        def convert_arg(arg: ast27.expr) -> Var:
+        def extract_names(arg: ast27.expr) -> List[str]:
+            if isinstance(arg, ast27.Name):
+                return [arg.id]
+            elif isinstance(arg, ast27.Tuple):
+                return [name for elt in arg.elts for name in extract_names(elt)]
+            else:
+                return []
+
+        def convert_arg(index: int, arg: ast27.expr) -> Var:
             if isinstance(arg, ast27.Name):
                 v = arg.id
             elif isinstance(arg, ast27.Tuple):
-                # TODO: An `arg` object may be a Tuple instead of just an identifier in the
-                # case of Python 2 function definitions/lambdas that use the tuple unpacking
-                # syntax. The `typed_ast.conversions` module ended up just simply passing the
-                # the arg object unmodified (instead of converting it into more args, etc).
-                # This isn't typesafe, since we will no longer be always passing in a string
-                # to `Var`, but we'll do the same here for consistency.
-                v = arg  # type: ignore
+                v = '__tuple_arg_{}'.format(index + 1)
+                rvalue = NameExpr(v)
+                rvalue.set_line(line)
+                assignment = AssignmentStmt([self.visit(arg)], rvalue)
+                assignment.set_line(line)
+                decompose_stmts.append(assignment)
             else:
                 raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg)))
             return Var(v)
@@ -372,8 +407,9 @@ class ASTConverter(ast27.NodeTransformer):
                 return converter.visit_raw_str(type_comments[i])
             return None
 
-        args = [(convert_arg(arg), get_type(i)) for i, arg in enumerate(n.args)]
+        args = [(convert_arg(i, arg), get_type(i)) for i, arg in enumerate(n.args)]
         defaults = self.translate_expr_list(n.defaults)
+        names = [name for arg in n.args for name in extract_names(arg)]  # type: List[str]
 
         new_args = []  # type: List[Argument]
         num_no_defaults = len(args) - len(defaults)
@@ -388,13 +424,22 @@ class ASTConverter(ast27.NodeTransformer):
         # *arg
         if n.vararg is not None:
             new_args.append(Argument(Var(n.vararg), get_type(len(args)), None, ARG_STAR))
+            names.append(n.vararg)
 
         # **kwarg
         if n.kwarg is not None:
             typ = get_type(len(args) + (0 if n.vararg is None else 1))
             new_args.append(Argument(Var(n.kwarg), typ, None, ARG_STAR2))
+            names.append(n.kwarg)
+
+        seen_names = set()  # type: Set[str]
+        for name in names:
+            if name in seen_names:
+                self.fail("duplicate argument '{}' in function definition".format(name), line, 0)
+                break
+            seen_names.add(name)
 
-        return new_args
+        return new_args, decompose_stmts
 
     def stringify_name(self, n: ast27.AST) -> str:
         if isinstance(n, ast27.Name):
@@ -442,7 +487,7 @@ class ASTConverter(ast27.NodeTransformer):
     def visit_Assign(self, n: ast27.Assign) -> AssignmentStmt:
         typ = None
         if n.type_comment:
-            typ = parse_type_comment(n.type_comment, n.lineno)
+            typ = parse_type_comment(n.type_comment, n.lineno, self.errors)
 
         return AssignmentStmt(self.translate_expr_list(n.targets),
                               self.visit(n.value),
@@ -458,10 +503,15 @@ class ASTConverter(ast27.NodeTransformer):
     # For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
     @with_line
     def visit_For(self, n: ast27.For) -> ForStmt:
+        if n.type_comment is not None:
+            target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+        else:
+            target_type = None
         return ForStmt(self.visit(n.target),
                        self.visit(n.iter),
                        self.as_block(n.body, n.lineno),
-                       self.as_block(n.orelse, n.lineno))
+                       self.as_block(n.orelse, n.lineno),
+                       target_type)
 
     # While(expr test, stmt* body, stmt* orelse)
     @with_line
@@ -480,24 +530,29 @@ class ASTConverter(ast27.NodeTransformer):
     # With(withitem* items, stmt* body, string? type_comment)
     @with_line
     def visit_With(self, n: ast27.With) -> WithStmt:
+        if n.type_comment is not None:
+            target_type = parse_type_comment(n.type_comment, n.lineno, self.errors)
+        else:
+            target_type = None
         return WithStmt([self.visit(n.context_expr)],
                         [self.visit(n.optional_vars)],
-                        self.as_block(n.body, n.lineno))
+                        self.as_block(n.body, n.lineno),
+                        target_type)
 
     @with_line
     def visit_Raise(self, n: ast27.Raise) -> RaiseStmt:
-        e = None
-        if n.type is not None:
-            e = n.type
-
-            if n.inst is not None and not (isinstance(n.inst, ast27.Name) and n.inst.id == "None"):
-                if isinstance(n.inst, ast27.Tuple):
-                    args = n.inst.elts
+        if n.type is None:
+            e = None
+        else:
+            if n.inst is None:
+                e = self.visit(n.type)
+            else:
+                if n.tback is None:
+                    e = TupleExpr([self.visit(n.type), self.visit(n.inst)])
                 else:
-                    args = [n.inst]
-                e = ast27.Call(e, args, [], None, None, lineno=e.lineno, col_offset=-1)
+                    e = TupleExpr([self.visit(n.type), self.visit(n.inst), self.visit(n.tback)])
 
-        return RaiseStmt(self.visit(e), None)
+        return RaiseStmt(e, None)
 
     # TryExcept(stmt* body, excepthandler* handlers, stmt* orelse)
     @with_line
@@ -537,55 +592,23 @@ class ASTConverter(ast27.NodeTransformer):
                        self.as_block(finalbody, lineno))
 
     @with_line
-    def visit_Print(self, n: ast27.Print) -> ExpressionStmt:
-        keywords = []
-        if n.dest is not None:
-            keywords.append(ast27.keyword("file", n.dest))
-
-        if not n.nl:
-            keywords.append(ast27.keyword("end", ast27.Str(" ", 0,
-                                                           lineno=n.lineno, col_offset=-1)))
-
-        # TODO: Rather then desugaring Print into an intermediary ast27.Call object, it might
-        # be more efficient to just directly create a mypy.node.CallExpr object.
-        call = ast27.Call(
-            ast27.Name("print", ast27.Load(), lineno=n.lineno, col_offset=-1),
-            n.values, keywords, None, None,
-            lineno=n.lineno, col_offset=-1)
-        return self.visit_Expr(ast27.Expr(call, lineno=n.lineno, col_offset=-1))
+    def visit_Print(self, n: ast27.Print) -> PrintStmt:
+        return PrintStmt(self.translate_expr_list(n.values), n.nl, self.visit(n.dest))
 
     @with_line
-    def visit_Exec(self, n: ast27.Exec) -> ExpressionStmt:
-        new_globals = n.globals
-        new_locals = n.locals
-
-        if new_globals is None:
-            new_globals = ast27.Name("None", ast27.Load(), lineno=-1, col_offset=-1)
-        if new_locals is None:
-            new_locals = ast27.Name("None", ast27.Load(), lineno=-1, col_offset=-1)
-
-        # TODO: Comment in visit_Print also applies here
-        return self.visit_Expr(ast27.Expr(
-            ast27.Call(
-                ast27.Name("exec", ast27.Load(), lineno=n.lineno, col_offset=-1),
-                [n.body, new_globals, new_locals],
-                [], None, None,
-                lineno=n.lineno, col_offset=-1),
-            lineno=n.lineno, col_offset=-1))
+    def visit_Exec(self, n: ast27.Exec) -> ExecStmt:
+        return ExecStmt(self.visit(n.body),
+                        self.visit(n.globals),
+                        self.visit(n.locals))
 
     @with_line
-    def visit_Repr(self, n: ast27.Repr) -> CallExpr:
-        # TODO: Comment in visit_Print also applies here
-        return self.visit_Call(ast27.Call(
-            ast27.Name("repr", ast27.Load(), lineno=n.lineno, col_offset=-1),
-            n.value,
-            [], None, None,
-            lineno=n.lineno, col_offset=-1))
+    def visit_Repr(self, n: ast27.Repr) -> BackquoteExpr:
+        return BackquoteExpr(self.visit(n.value))
 
     # Assert(expr test, expr? msg)
     @with_line
     def visit_Assert(self, n: ast27.Assert) -> AssertStmt:
-        return AssertStmt(self.visit(n.test))
+        return AssertStmt(self.visit(n.test), self.visit(n.msg))
 
     # Import(alias* names)
     @with_line
@@ -697,12 +720,16 @@ class ASTConverter(ast27.NodeTransformer):
     # Lambda(arguments args, expr body)
     @with_line
     def visit_Lambda(self, n: ast27.Lambda) -> FuncExpr:
-        body = ast27.Return(n.body)
-        body.lineno = n.lineno
-        body.col_offset = n.col_offset
+        args, decompose_stmts = self.transform_args(n.args, n.lineno)
+
+        n_body = ast27.Return(n.body)
+        n_body.lineno = n.lineno
+        n_body.col_offset = n.col_offset
+        body = self.as_block([n_body], n.lineno)
+        if decompose_stmts:
+            body.body = decompose_stmts + body.body
 
-        return FuncExpr(self.transform_args(n.args, n.lineno),
-                        self.as_block([body], n.lineno))
+        return FuncExpr(args, body)
 
     # IfExp(expr test, expr body, expr orelse)
     @with_line
diff --git a/mypy/main.py b/mypy/main.py
index d2970e9..1c1f57e 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -8,11 +8,10 @@ import re
 import sys
 import time
 
-from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, cast
+from typing import Any, Dict, List, Mapping, Optional, Set, Tuple
 
 from mypy import build
 from mypy import defaults
-from mypy import git
 from mypy import experiments
 from mypy import util
 from mypy.build import BuildSource, BuildResult, PYTHON_EXTENSIONS
@@ -36,6 +35,7 @@ def main(script_path: str) -> None:
         bin_dir = find_bin_directory(script_path)
     else:
         bin_dir = None
+    sys.setrecursionlimit(2 ** 14)
     sources, options = process_options(sys.argv[1:])
     serious = False
     try:
@@ -79,8 +79,7 @@ def readlinkabs(link: str) -> str:
     return os.path.join(os.path.dirname(link), path)
 
 
-def type_check_only(sources: List[BuildSource],
-        bin_dir: str, options: Options) -> BuildResult:
+def type_check_only(sources: List[BuildSource], bin_dir: str, options: Options) -> BuildResult:
     # Type-check the program and dependencies and translate to Python.
     return build.build(sources=sources,
                        bin_dir=bin_dir,
@@ -122,18 +121,69 @@ def parse_version(v: str) -> Tuple[int, int]:
             "Invalid python version '{}' (expected format: 'x.y')".format(v))
 
 
+# Make the help output a little less jarring.
+class AugmentedHelpFormatter(argparse.HelpFormatter):
+    def __init__(self, prog: Optional[str]) -> None:
+        super().__init__(prog=prog, max_help_position=28)
+
+
+# Define pairs of flag prefixes with inverse meaning.
+flag_prefix_pairs = [
+    ('allow', 'disallow'),
+    ('show', 'hide'),
+]
+flag_prefix_map = {}  # type: Dict[str, str]
+for a, b in flag_prefix_pairs:
+    flag_prefix_map[a] = b
+    flag_prefix_map[b] = a
+
+
+def invert_flag_name(flag: str) -> str:
+    split = flag[2:].split('-', 1)
+    if len(split) == 2:
+        prefix, rest = split
+        if prefix in flag_prefix_map:
+            return '--{}-{}'.format(flag_prefix_map[prefix], rest)
+        elif prefix == 'no':
+            return '--{}'.format(rest)
+
+    return '--no-{}'.format(flag[2:])
+
+
 def process_options(args: List[str],
                     require_targets: bool = True
                     ) -> Tuple[List[BuildSource], Options]:
     """Parse command line arguments."""
 
-    # Make the help output a little less jarring.
-    help_factory = (lambda prog:
-                    argparse.RawDescriptionHelpFormatter(prog=prog,
-                                                         max_help_position=28))  # type: Any
     parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
                                      fromfile_prefix_chars='@',
-                                     formatter_class=help_factory)
+                                     formatter_class=AugmentedHelpFormatter)
+
+    strict_flag_names = []  # type: List[str]
+    strict_flag_assignments = []  # type: List[Tuple[str, bool]]
+
+    def add_invertible_flag(flag: str,
+                            *,
+                            inverse: str = None,
+                            default: bool,
+                            dest: str = None,
+                            help: str,
+                            strict_flag: bool = False
+                            ) -> None:
+        if inverse is None:
+            inverse = invert_flag_name(flag)
+        arg = parser.add_argument(flag,  # type: ignore  # incorrect stub for add_argument
+                                  action='store_false' if default else 'store_true',
+                                  dest=dest,
+                                  help=help + " (inverse: {})".format(inverse))
+        dest = arg.dest
+        arg = parser.add_argument(inverse,  # type: ignore  # incorrect stub for add_argument
+                                  action='store_true' if default else 'store_false',
+                                  dest=dest,
+                                  help=argparse.SUPPRESS)
+        if strict_flag:
+            strict_flag_names.append(flag)
+            strict_flag_assignments.append((dest, not default))
 
     # Unless otherwise specified, arguments will be parsed directly onto an
     # Options object.  Options that require further processing should have
@@ -154,37 +204,36 @@ def process_options(args: List[str],
                         help="silently ignore imports of missing modules")
     parser.add_argument('--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
                         default='normal', help="how to treat imports (default normal)")
-    parser.add_argument('--disallow-untyped-calls', action='store_true',
+    add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True,
                         help="disallow calling functions without type annotations"
                         " from functions with type annotations")
-    parser.add_argument('--disallow-untyped-defs', action='store_true',
+    add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True,
                         help="disallow defining functions without type annotations"
                         " or with incomplete type annotations")
-    parser.add_argument('--check-untyped-defs', action='store_true',
+    add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True,
                         help="type check the interior of functions without type annotations")
-    parser.add_argument('--disallow-subclassing-any', action='store_true',
+    add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True,
                         help="disallow subclassing values of type 'Any' when defining classes")
-    parser.add_argument('--warn-incomplete-stub', action='store_true',
+    add_invertible_flag('--warn-incomplete-stub', default=False,
                         help="warn if missing type annotation in typeshed, only relevant with"
                         " --check-untyped-defs enabled")
-    parser.add_argument('--warn-redundant-casts', action='store_true',
+    add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
                         help="warn about casting an expression to its inferred type")
-    parser.add_argument('--warn-no-return', action='store_true',
+    add_invertible_flag('--warn-no-return', default=False,
                         help="warn about functions that end without returning")
-    parser.add_argument('--warn-unused-ignores', action='store_true',
+    add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                         help="warn about unneeded '# type: ignore' comments")
-    parser.add_argument('--show-error-context', action='store_false',
+    add_invertible_flag('--show-error-context', default=True,
                         dest='hide_error_context',
                         help='Precede errors with "note:" messages explaining context')
-    parser.add_argument('--fast-parser', action='store_true',
-                        help="enable fast parser (recommended except on Windows)")
+    add_invertible_flag('--no-fast-parser', default=True, dest='fast_parser',
+                        help="disable the fast parser (not recommended)")
     parser.add_argument('-i', '--incremental', action='store_true',
                         help="enable experimental module cache")
     parser.add_argument('--cache-dir', action='store', metavar='DIR',
                         help="store module cache info in the given folder in incremental mode "
                         "(defaults to '{}')".format(defaults.CACHE_DIR))
-    parser.add_argument('--strict-optional', action='store_true',
-                        dest='strict_optional',
+    add_invertible_flag('--strict-optional', default=False, strict_flag=True,
                         help="enable experimental strict Optional checks")
     parser.add_argument('--strict-optional-whitelist', metavar='GLOB', nargs='*',
                         help="suppress strict Optional errors in all but the provided files "
@@ -207,12 +256,17 @@ def process_options(args: List[str],
     parser.add_argument('--config-file',
                         help="Configuration file, must have a [mypy] section "
                         "(defaults to {})".format(defaults.CONFIG_FILE))
-    parser.add_argument('--show-column-numbers', action='store_true',
-                        dest='show_column_numbers',
+    add_invertible_flag('--show-column-numbers', default=False,
                         help="Show column numbers in error messages")
     parser.add_argument('--find-occurrences', metavar='CLASS.MEMBER',
                         dest='special-opts:find_occurrences',
                         help="print out all usages of a class member (experimental)")
+    add_invertible_flag('--strict-boolean', default=False, strict_flag=True,
+                        help='enable strict boolean checks in conditions')
+    strict_help = "Strict mode. Enables the following flags: {}".format(
+        ", ".join(strict_flag_names))
+    parser.add_argument('--strict', action='store_true', dest='special-opts:strict',
+                        help=strict_help)
     # hidden options
     # --shadow-file a.py tmp.py will typecheck tmp.py in place of a.py.
     # Useful for tools to make transformations to a file to get more
@@ -226,9 +280,6 @@ def process_options(args: List[str],
     parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
     # --dump-graph will dump the contents of the graph of SCCs and exit.
     parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
-    parser.add_argument('--hide-error-context', action='store_true',
-                        dest='hide_error_context',
-                        help=argparse.SUPPRESS)
     # deprecated options
     parser.add_argument('-f', '--dirty-stubs', action='store_true',
                         dest='special-opts:dirty_stubs',
@@ -255,7 +306,7 @@ def process_options(args: List[str],
     code_group.add_argument('-m', '--module', action='append', metavar='MODULE',
                             dest='special-opts:modules',
                             help="type-check module; can repeat for more modules")
-    # TODO: `mypy -p A -p B` currently silently ignores ignores A
+    # TODO: `mypy -p A -p B` currently silently ignores A
     # (last option wins).  Perhaps -c, -m and -p could just be
     # command-line flags that modify how we interpret self.files?
     code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT',
@@ -267,7 +318,7 @@ def process_options(args: List[str],
                             help="type-check given files or directories")
 
     # Parse arguments once into a dummy namespace so we can get the
-    # filename for the config file.
+    # filename for the config file and know if the user requested all strict options.
     dummy = argparse.Namespace()
     parser.parse_args(args, dummy)
     config_file = defaults.CONFIG_FILE
@@ -281,6 +332,12 @@ def process_options(args: List[str],
     if config_file and os.path.exists(config_file):
         parse_config_file(options, config_file)
 
+    # Set strict flags before parsing (if strict mode enabled), so other command
+    # line options can override.
+    if getattr(dummy, 'special-opts:strict'):
+        for dest, value in strict_flag_assignments:
+            setattr(options, dest, value)
+
     # Parse command line for real, using a split namespace.
     special_opts = argparse.Namespace()
     parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))
@@ -313,9 +370,9 @@ def process_options(args: List[str],
     # Check for invalid argument combinations.
     if require_targets:
         code_methods = sum(bool(c) for c in [special_opts.modules,
-                                            special_opts.command,
-                                            special_opts.package,
-                                            special_opts.files])
+                                             special_opts.command,
+                                             special_opts.package,
+                                             special_opts.files])
         if code_methods == 0:
             parser.error("Missing target module, package, files, or command.")
         elif code_methods > 1:
@@ -358,7 +415,8 @@ def process_options(args: List[str],
         return targets, options
     elif special_opts.command:
         options.build_type = BuildType.PROGRAM_TEXT
-        return [BuildSource(None, None, '\n'.join(special_opts.command))], options
+        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
+        return targets, options
     else:
         targets = []
         for f in special_opts.files:
@@ -481,7 +539,7 @@ config_types = {
     # These two are for backwards compatibility
     'silent_imports': bool,
     'almost_silent': bool,
-}  # type: Dict[str, Any]
+}
 
 
 def parse_config_file(options: Options, filename: str) -> None:
diff --git a/mypy/messages.py b/mypy/messages.py
index 53c46ac..3d38333 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -76,6 +76,9 @@ INVALID_TYPEDDICT_ARGS = \
     'Expected keyword arguments, {...}, or dict(...) in TypedDict constructor'
 TYPEDDICT_ITEM_NAME_MUST_BE_STRING_LITERAL = \
     'Expected TypedDict item name to be string literal'
+MALFORMED_ASSERT = 'Assertion is always true, perhaps remove parentheses?'
+NON_BOOLEAN_IN_CONDITIONAL = 'Condition must be a boolean'
+DUPLICATE_TYPE_SIGNATURES = 'Function has duplicate type signatures'
 
 ARG_CONSTRUCTOR_NAMES = {
     ARG_POS: "Arg",
@@ -157,9 +160,14 @@ class MessageBuilder:
 
     def note(self, msg: str, context: Context, file: str = None,
              origin: Context = None) -> None:
-        """Report an error message (unless disabled)."""
+        """Report a note (unless disabled)."""
         self.report(msg, context, 'note', file=file, origin=origin)
 
+    def warn(self, msg: str, context: Context, file: str = None,
+             origin: Context = None) -> None:
+        """Report a warning message (unless disabled)."""
+        self.report(msg, context, 'warning', file=file, origin=origin)
+
     def format(self, typ: Type, verbosity: int = 0) -> str:
         """Convert a type to a relatively short string that is suitable for error messages.
 
diff --git a/mypy/myunit/__init__.py b/mypy/myunit/__init__.py
new file mode 100644
index 0000000..26b9a45
--- /dev/null
+++ b/mypy/myunit/__init__.py
@@ -0,0 +1,380 @@
+import importlib
+import os
+import sys
+import re
+import tempfile
+import time
+import traceback
+
+from typing import List, Tuple, Any, Callable, Union, cast
+from types import TracebackType
+
+
# TODO remove global state
# Module-level runner configuration, (re)initialized by main() and read by
# run_test_recursive()/run_single_test() below.
is_verbose = False  # echo each test name to stderr as it runs
is_quiet = False  # suppress the success summary
patterns = []  # type: List[str]  # command-line name filters ('*' wildcard)
times = []  # type: List[Tuple[float, str]]  # (duration seconds, test name) per run
+
+
class AssertionFailure(Exception):
    """Raised to signal a failed test-case assertion.

    A falsy message is normalized away so ``args`` stays empty instead of
    holding ``None`` or ``''``.
    """

    def __init__(self, s: str = None) -> None:
        # Only forward a truthy message to Exception.__init__.
        extra = (s,) if s else ()
        super().__init__(*extra)
+
+
class SkipTestCaseException(Exception):
    """Raised (via Suite.skip) to mark the current test case as skipped."""
+
+
def assert_true(b: bool, msg: str = None) -> None:
    """Raise AssertionFailure (with optional msg) when *b* is falsy."""
    if b:
        return
    raise AssertionFailure(msg)
+
+
def assert_false(b: bool, msg: str = None) -> None:
    """Raise AssertionFailure (with optional msg) when *b* is truthy."""
    if not b:
        return
    raise AssertionFailure(msg)
+
+
def good_repr(obj: object) -> str:
    """Return repr(obj), rendering strings with two or more newlines as a
    triple-quoted block so multi-line test output diffs readably."""
    if not isinstance(obj, str) or obj.count('\n') <= 1:
        return repr(obj)
    pieces = ["'''\\"]
    for chunk in obj.split('\n'):
        # Prepending '"' forces repr to quote with "'"; slicing then drops
        # the leading quote+marker and the trailing quote it added.
        pieces.append(repr('"' + chunk)[2:-1])
    pieces[-1] += "'''"
    return '\n'.join(pieces)
+
+
def assert_equal(a: object, b: object, fmt: str = '{} != {}') -> None:
    """Raise AssertionFailure unless a == b, formatting both via good_repr."""
    if a == b:
        return
    raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
+
+
def assert_not_equal(a: object, b: object, fmt: str = '{} == {}') -> None:
    """Raise AssertionFailure when a == b, formatting both via good_repr."""
    if a != b:
        return
    raise AssertionFailure(fmt.format(good_repr(a), good_repr(b)))
+
+
def assert_raises(typ: type, *rest: Any) -> None:
    """Usage: assert_raises(exception class[, message], function[, args])

    Invoke the function with the given arguments and verify that it raises
    exactly the given exception type (and, if supplied, message).

    TODO use overloads for better type checking
    """
    # Peel off the optional message argument.
    msg = None  # type: str
    remaining = rest
    if remaining[0] is None or isinstance(remaining[0], str):
        msg = remaining[0]
        remaining = remaining[1:]
    f = remaining[0]
    args = []  # type: List[Any]
    if len(remaining) > 1:
        args = remaining[1]
        assert len(remaining) <= 2

    # Run the callable and check what it raised.
    try:
        f(*args)
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except BaseException as e:
        assert_type(typ, e)
        if msg:
            assert_equal(e.args[0], msg, 'Invalid message {}, expected {}')
    else:
        raise AssertionFailure('No exception raised')
+
+
def assert_type(typ: type, value: object) -> None:
    """Raise AssertionFailure unless type(value) is exactly *typ*.

    Note: this is an exact-type check, not isinstance().
    """
    if type(value) == typ:
        return
    raise AssertionFailure('Invalid type {}, expected {}'.format(
        typename(type(value)), typename(typ)))
+
+
def fail() -> None:
    """Unconditionally fail the current test case."""
    raise AssertionFailure
+
+
class TestCase:
    """A single runnable test, isolated in its own temporary directory."""

    def __init__(self, name: str, suite: 'Suite' = None,
                 func: Callable[[], None] = None) -> None:
        self.name = name
        self.suite = suite
        self.func = func
        # Populated by set_up(), cleared again by tear_down().
        self.old_cwd = None  # type: str
        self.tmpdir = None  # type: tempfile.TemporaryDirectory

    def run(self) -> None:
        """Execute the test body, if one was supplied."""
        if self.func:
            self.func()

    def set_up(self) -> None:
        """Switch into a fresh temp working directory with a 'tmp' subdir."""
        self.old_cwd = os.getcwd()
        # NOTE(review): assumes ./tmp-test-dirs already exists;
        # TemporaryDirectory fails otherwise — presumably runtests.py
        # creates it, verify.
        self.tmpdir = tempfile.TemporaryDirectory(
            prefix='mypy-test-', dir=os.path.abspath('tmp-test-dirs'))
        os.chdir(self.tmpdir.name)
        os.mkdir('tmp')
        if self.suite:
            self.suite.set_up()

    def tear_down(self) -> None:
        """Undo set_up(): restore the previous cwd and delete the temp dir."""
        if self.suite:
            self.suite.tear_down()
        os.chdir(self.old_cwd)
        self.tmpdir.cleanup()
        self.old_cwd = None
        self.tmpdir = None
+
+
class Suite:
    """A collection of test cases discovered from ``test*`` attributes."""

    def __init__(self) -> None:
        self.prefix = typename(type(self)) + '.'
        # Each entry is a TestCase or a (prefix, Suite/callable) pair.
        self._test_cases = []  # type: List[Any]
        self.init()

    def set_up(self) -> None:
        """Hook run before each test case; override in subclasses."""
        pass

    def tear_down(self) -> None:
        """Hook run after each test case; override in subclasses."""
        pass

    def init(self) -> None:
        # Register every attribute named 'test*': nested suites keep their
        # own prefix, plain callables become TestCase objects.
        for attr in dir(self):
            if not attr.startswith('test'):
                continue
            value = getattr(self, attr)
            if isinstance(value, Suite):
                self.add_test((attr + '.', value))
            else:
                self.add_test(TestCase(attr, self, value))

    def add_test(self, test: Union[TestCase,
                                   Tuple[str, Callable[[], None]],
                                   Tuple[str, 'Suite']]) -> None:
        self._test_cases.append(test)

    def cases(self) -> List[Any]:
        # Return a copy so callers cannot mutate the registry.
        return list(self._test_cases)

    def skip(self) -> None:
        """Abort the current test case, marking it as skipped."""
        raise SkipTestCaseException()
+
+
def add_suites_from_module(suites: List[Suite], mod_name: str) -> None:
    """Import *mod_name* and append an instance of every Suite subclass it
    defines to *suites*; exit with an error if none are found."""
    mod = importlib.import_module(mod_name)
    found = False
    for candidate in mod.__dict__.values():
        if (isinstance(candidate, type) and issubclass(candidate, Suite)
                and candidate is not Suite):
            found = True
            suites.append(cast(Callable[[], Suite], candidate)())
    if not found:
        # Guard against modules with no myunit suites — e.g. the codecs
        # tests use unittest since they must stay python2-compatible.
        sys.exit('Test module %s had no test!' % mod_name)
+
+
class ListSuite(Suite):
    """Wrapper suite exposing each given suite as a ``test_*`` attribute,
    so Suite.init() discovers them with readable generated names."""

    def __init__(self, suites: List[Suite]) -> None:
        for child in suites:
            # Derive a compact name from the suite's module path.
            module = type(child).__module__.replace('.', '_')
            module = module.replace('mypy_', '')
            module = module.replace('test_', '')
            module = module.strip('_').replace('__', '_')
            attr = 'test_%s_%s' % (module, type(child).__name__)
            setattr(self, attr, child)
        super().__init__()
+
+
def main(args: List[str] = None) -> None:
    """Command-line entry point: parse flags, load suites, run them.

    Exits with status 1 (after a stderr summary) if any test fails.
    """
    global patterns, is_verbose, is_quiet
    if not args:
        args = sys.argv[1:]
    # Reset module-level runner state for this invocation.
    is_verbose = False
    is_quiet = False
    suites = []  # type: List[Suite]
    patterns = []
    arg_iter = iter(args)
    for arg in arg_iter:
        if arg == '-v':
            is_verbose = True
        elif arg == '-q':
            is_quiet = True
        elif arg == '-m':
            # -m consumes the following argument as a module name.
            mod = next(arg_iter, None)
            if mod is None:
                sys.exit('-m requires an argument')
            add_suites_from_module(suites, mod)
        elif not arg.startswith('-'):
            patterns.append(arg)
        else:
            sys.exit('Usage: python -m mypy.myunit [-v] [-q]'
                     ' -m mypy.test.module [-m mypy.test.module ...] [filter ...]')
    if not patterns:
        patterns.append('*')
    if not suites:
        sys.exit('At least one -m argument is required')

    root = ListSuite(suites)
    num_total, num_fail, num_skip = run_test_recursive(root, 0, 0, 0, '', 0)

    skip_msg = ', {} skipped'.format(num_skip) if num_skip > 0 else ''

    if num_fail == 0:
        if not is_quiet:
            print('%d test cases run%s, all passed.' % (num_total, skip_msg))
            print('*** OK ***')
    else:
        sys.stderr.write('%d/%d test cases failed%s.\n' % (num_fail,
                                                           num_total,
                                                           skip_msg))
        sys.stderr.write('*** FAILURE ***\n')
        sys.exit(1)
+
+
def run_test_recursive(test: Any, num_total: int, num_fail: int, num_skip: int,
                       prefix: str, depth: int) -> Tuple[int, int, int]:
    """Run a TestCase, Suite or (prefix, Suite) pair recursively.

    Returns the updated (total, failed, skipped) counters.
    """
    if isinstance(test, TestCase):
        full_name = prefix + test.name
        # Run only tests whose full name matches a command-line pattern.
        if any(match_pattern(full_name, p) for p in patterns):
            is_fail, is_skip = run_single_test(full_name, test)
            num_total += 1
            if is_fail:
                num_fail += 1
            if is_skip:
                num_skip += 1
        return num_total, num_fail, num_skip

    # Unpack either a (prefix, suite) pair or a bare suite.
    if isinstance(test, (list, tuple)):
        suite = test[1]  # type: Suite
        suite_prefix = test[0]
    else:
        suite = test
        suite_prefix = test.prefix

    for child in suite.cases():
        # The top-level (depth 0) suite's prefix is not part of test names.
        child_prefix = prefix + suite_prefix if depth > 0 else prefix
        num_total, num_fail, num_skip = run_test_recursive(
            child, num_total, num_fail, num_skip, child_prefix, depth + 1)
    return num_total, num_fail, num_skip
+
+
def run_single_test(name: str, test: Any) -> Tuple[bool, bool]:
    """Run one test case and return (is_fail, is_skip).

    The case's duration is appended to the module-level ``times`` list.
    """
    if is_verbose:
        sys.stderr.write(name)
        sys.stderr.flush()

    time0 = time.time()
    test.set_up()  # FIX: check exceptions
    exc_traceback = None  # type: Any
    try:
        test.run()
    except BaseException as e:
        if isinstance(e, KeyboardInterrupt):
            raise
        # Record the failure but keep going so tear_down() always runs.
        exc_type, exc_value, exc_traceback = sys.exc_info()
    test.tear_down()  # FIX: check exceptions
    times.append((time.time() - time0, name))

    if exc_traceback:
        if isinstance(exc_value, SkipTestCaseException):
            # Suite.skip() was called inside the test: skipped, not failed.
            if is_verbose:
                sys.stderr.write(' (skipped)\n')
            return False, True
        else:
            handle_failure(name, exc_type, exc_value, exc_traceback)
            return True, False
    elif is_verbose:
        sys.stderr.write('\n')

    return False, False
+
+
def handle_failure(name: str,
                   exc_type: type,
                   exc_value: BaseException,
                   exc_traceback: TracebackType,
                   ) -> None:
    """Write a report for a failed test case to stderr."""
    if is_verbose:
        sys.stderr.write('\n\n')
    if exc_value.args and exc_value.args[0]:
        msg = ': ' + str(exc_value)
    else:
        msg = ''
    if isinstance(exc_value, SystemExit):
        # We assume that before doing exit() (which raises SystemExit) we've
        # printed enough context that a stack trace is not useful — e.g.
        # uncaught errors during semantic analysis or type checking call
        # exit() and already print a trace.
        sys.stderr.write('\n')
    else:
        sys.stderr.write('Traceback (most recent call last):\n')
        for frame in clean_traceback(traceback.format_tb(exc_traceback)):
            sys.stderr.write(frame)
    sys.stderr.write('{}{}\n\n'.format(typename(exc_type), msg))
    sys.stderr.write('{} failed\n\n'.format(name))
+
+
def typename(t: type) -> str:
    """Return a short human-readable name for type *t*.

    ``<class 'a.b.C'>`` -> ``C``; ``<class 'int'>`` -> ``int``.
    """
    text = str(t)
    if '.' in text:
        # Dotted path: keep the last component, minus repr punctuation.
        return text.split('.')[-1].rstrip("'>")
    # Builtin form "<class 'int'>": slice out the bare name.
    return text[8:-2]
+
+
def match_pattern(s: str, p: str) -> bool:
    """Glob-style match where '*' matches any (possibly empty) substring.

    All other pattern characters match literally; there is no '?' or '[]'.
    """
    if not p:
        # Empty pattern only matches the empty string.
        return not s
    if p[0] == '*':
        # A lone trailing '*' matches anything; otherwise try every split.
        return len(p) == 1 or any(
            match_pattern(s[i:], p[1:]) for i in range(len(s) + 1))
    if not s:
        return False
    return s[0] == p[0] and match_pattern(s[1:], p[1:])
+
+
def clean_traceback(tb: List[str]) -> List[str]:
    """Strip runner and assert-helper frames from a formatted traceback."""
    # Drop everything up to (and including) the LAST frame that invoked the
    # test body, so only user code remains at the top.
    start = 0
    for i, frame in enumerate(tb):
        if '\n    test.run()\n' in frame or '\n    self.func()\n' in frame:
            start = i + 1
    cleaned = tb[start:]
    # Peel trailing frames that sit inside our assertion helpers.
    for helper in ('assert_equal', 'assert_not_equal', 'assert_type',
                   'assert_raises', 'assert_true'):
        if cleaned and ', in {}\n'.format(helper) in cleaned[-1]:
            cleaned = cleaned[:-1]
    return cleaned
diff --git a/mypy/myunit/__main__.py b/mypy/myunit/__main__.py
new file mode 100644
index 0000000..78ef01f
--- /dev/null
+++ b/mypy/myunit/__main__.py
@@ -0,0 +1,18 @@
+# This is a separate module from mypy.myunit so it doesn't exist twice.
+"""Myunit test runner command line tool.
+
+Usually used as a slave by runtests.py, but can be used directly.
+"""
+
+from mypy.myunit import main
+
+# In Python 3.3, mypy.__path__ contains a relative path to the mypy module
+# (whereas in later Python versions it contains an absolute path).  Because the
+# test runner changes directories, this breaks non-toplevel mypy imports.  We
+# fix that problem by fixing up the path to be absolute here.
+import os.path
+import mypy
+# User-defined packages always have __path__ attributes, but mypy doesn't know that.
+mypy.__path__ = [os.path.abspath(p) for p in mypy.__path__]  # type: ignore
+
+main()
diff --git a/mypy/nodes.py b/mypy/nodes.py
index eeff57a..c9485fe 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -9,7 +9,7 @@ from typing import (
 
 from mypy.lex import Token
 import mypy.strconv
-from mypy.visitor import NodeVisitor
+from mypy.visitor import NodeVisitor, ExpressionVisitor
 from mypy.util import dump_tagged, short_type
 
 
@@ -75,7 +75,7 @@ inverse_node_kinds = {_kind: _name for _name, _kind in node_kinds.items()}
 
 
 implicit_module_attrs = {'__name__': '__builtins__.str',
-                         '__doc__': '__builtins__.str',
+                         '__doc__': None,  # depends on Python version, see semanal.py
                          '__file__': '__builtins__.str',
                          '__package__': '__builtins__.str'}
 
@@ -143,6 +143,8 @@ class Statement(Node):
 
 class Expression(Node):
     """An expression node."""
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
+        raise RuntimeError('Not implemented')
 
 
 # TODO:
@@ -848,6 +850,8 @@ class WhileStmt(Statement):
 class ForStmt(Statement):
     # Index variables
     index = None  # type: Lvalue
+    # Type given by type comments for index, can be None
+    index_type = None  # type: mypy.types.Type
     # Expression to iterate
     expr = None  # type: Expression
     body = None  # type: Block
@@ -855,8 +859,9 @@ class ForStmt(Statement):
     is_async = False  # True if `async for ...` (PEP 492, Python 3.5)
 
     def __init__(self, index: Lvalue, expr: Expression, body: Block,
-                 else_body: Block) -> None:
+                 else_body: Block, index_type: 'mypy.types.Type' = None) -> None:
         self.index = index
+        self.index_type = index_type
         self.expr = expr
         self.body = body
         self.else_body = else_body
@@ -877,9 +882,11 @@ class ReturnStmt(Statement):
 
 class AssertStmt(Statement):
     expr = None  # type: Expression
+    msg = None  # type: Optional[Expression]
 
-    def __init__(self, expr: Expression) -> None:
+    def __init__(self, expr: Expression, msg: Expression = None) -> None:
         self.expr = expr
+        self.msg = msg
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
         return visitor.visit_assert_stmt(self)
@@ -962,13 +969,16 @@ class TryStmt(Statement):
 class WithStmt(Statement):
     expr = None  # type: List[Expression]
     target = None  # type: List[Lvalue]
+    # Type given by type comments for target, can be None
+    target_type = None  # type: mypy.types.Type
     body = None  # type: Block
     is_async = False  # True if `async with ...` (PEP 492, Python 3.5)
 
     def __init__(self, expr: List[Expression], target: List[Lvalue],
-                 body: Block) -> None:
+                 body: Block, target_type: 'mypy.types.Type' = None) -> None:
         self.expr = expr
         self.target = target
+        self.target_type = target_type
         self.body = body
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
@@ -1023,7 +1033,7 @@ class IntExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_int_expr(self)
 
 
@@ -1048,7 +1058,7 @@ class StrExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_str_expr(self)
 
 
@@ -1062,7 +1072,7 @@ class BytesExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_bytes_expr(self)
 
 
@@ -1076,7 +1086,7 @@ class UnicodeExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_unicode_expr(self)
 
 
@@ -1090,7 +1100,7 @@ class FloatExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_float_expr(self)
 
 
@@ -1104,14 +1114,14 @@ class ComplexExpr(Expression):
         self.value = value
         self.literal_hash = ('Literal', value)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_complex_expr(self)
 
 
 class EllipsisExpr(Expression):
     """Ellipsis (...)"""
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_ellipsis(self)
 
 
@@ -1128,7 +1138,7 @@ class StarExpr(Expression):
         # Whether this starred expression is used in a tuple/list and as lvalue
         self.valid = False
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_star_expr(self)
 
 
@@ -1160,7 +1170,7 @@ class NameExpr(RefExpr):
         self.name = name
         self.literal_hash = ('Var', name,)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_name_expr(self)
 
     def serialize(self) -> JsonDict:
@@ -1201,7 +1211,7 @@ class MemberExpr(RefExpr):
         self.literal = self.expr.literal
         self.literal_hash = ('Member', expr.literal_hash, name)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_member_expr(self)
 
 
@@ -1248,7 +1258,7 @@ class CallExpr(Expression):
         self.arg_names = arg_names
         self.analyzed = analyzed
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_call_expr(self)
 
 
@@ -1258,7 +1268,7 @@ class YieldFromExpr(Expression):
     def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_yield_from_expr(self)
 
 
@@ -1268,7 +1278,7 @@ class YieldExpr(Expression):
     def __init__(self, expr: Optional[Expression]) -> None:
         self.expr = expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_yield_expr(self)
 
 
@@ -1295,7 +1305,7 @@ class IndexExpr(Expression):
             self.literal_hash = ('Index', base.literal_hash,
                                  index.literal_hash)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_index_expr(self)
 
 
@@ -1313,7 +1323,7 @@ class UnaryExpr(Expression):
         self.literal = self.expr.literal
         self.literal_hash = ('Unary', op, expr.literal_hash)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_unary_expr(self)
 
 
@@ -1396,7 +1406,7 @@ class OpExpr(Expression):
         self.literal = min(self.left.literal, self.right.literal)
         self.literal_hash = ('Binary', op, left.literal_hash, right.literal_hash)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_op_expr(self)
 
 
@@ -1416,7 +1426,7 @@ class ComparisonExpr(Expression):
         self.literal_hash = ((cast(Any, 'Comparison'),) + tuple(operators) +
                              tuple(o.literal_hash for o in operands))
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_comparison_expr(self)
 
 
@@ -1437,7 +1447,7 @@ class SliceExpr(Expression):
         self.end_index = end_index
         self.stride = stride
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_slice_expr(self)
 
 
@@ -1451,7 +1461,7 @@ class CastExpr(Expression):
         self.expr = expr
         self.type = typ
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_cast_expr(self)
 
 
@@ -1463,7 +1473,7 @@ class RevealTypeExpr(Expression):
     def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_reveal_type_expr(self)
 
 
@@ -1476,7 +1486,7 @@ class SuperExpr(Expression):
     def __init__(self, name: str) -> None:
         self.name = name
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_super_expr(self)
 
 
@@ -1491,7 +1501,7 @@ class FuncExpr(FuncItem, Expression):
         ret = cast(ReturnStmt, self.body.body[-1])
         return ret.expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_func_expr(self)
 
 
@@ -1506,7 +1516,7 @@ class ListExpr(Expression):
             self.literal = LITERAL_YES
             self.literal_hash = (cast(Any, 'List'),) + tuple(x.literal_hash for x in items)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_list_expr(self)
 
 
@@ -1525,7 +1535,7 @@ class DictExpr(Expression):
             self.literal_hash = (cast(Any, 'Dict'),) + tuple(
                 (x[0].literal_hash, x[1].literal_hash) for x in items)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_dict_expr(self)
 
 
@@ -1540,7 +1550,7 @@ class TupleExpr(Expression):
             self.literal = LITERAL_YES
             self.literal_hash = (cast(Any, 'Tuple'),) + tuple(x.literal_hash for x in items)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_tuple_expr(self)
 
 
@@ -1555,7 +1565,7 @@ class SetExpr(Expression):
             self.literal = LITERAL_YES
             self.literal_hash = (cast(Any, 'Set'),) + tuple(x.literal_hash for x in items)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_set_expr(self)
 
 
@@ -1574,7 +1584,7 @@ class GeneratorExpr(Expression):
         self.condlists = condlists
         self.indices = indices
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_generator_expr(self)
 
 
@@ -1586,7 +1596,7 @@ class ListComprehension(Expression):
     def __init__(self, generator: GeneratorExpr) -> None:
         self.generator = generator
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_list_comprehension(self)
 
 
@@ -1598,7 +1608,7 @@ class SetComprehension(Expression):
     def __init__(self, generator: GeneratorExpr) -> None:
         self.generator = generator
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_set_comprehension(self)
 
 
@@ -1619,7 +1629,7 @@ class DictionaryComprehension(Expression):
         self.condlists = condlists
         self.indices = indices
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_dictionary_comprehension(self)
 
 
@@ -1635,7 +1645,7 @@ class ConditionalExpr(Expression):
         self.if_expr = if_expr
         self.else_expr = else_expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_conditional_expr(self)
 
 
@@ -1647,7 +1657,7 @@ class BackquoteExpr(Expression):
     def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_backquote_expr(self)
 
 
@@ -1661,7 +1671,7 @@ class TypeApplication(Expression):
         self.expr = expr
         self.types = types
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_type_application(self)
 
 
@@ -1712,7 +1722,7 @@ class TypeVarExpr(SymbolNode, Expression):
     def fullname(self) -> str:
         return self._fullname
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_type_var_expr(self)
 
     def serialize(self) -> JsonDict:
@@ -1751,7 +1761,7 @@ class TypeAliasExpr(Expression):
         self.fallback = fallback
         self.in_runtime = in_runtime
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_type_alias_expr(self)
 
 
@@ -1765,7 +1775,7 @@ class NamedTupleExpr(Expression):
     def __init__(self, info: 'TypeInfo') -> None:
         self.info = info
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_namedtuple_expr(self)
 
 
@@ -1778,7 +1788,7 @@ class TypedDictExpr(Expression):
     def __init__(self, info: 'TypeInfo') -> None:
         self.info = info
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_typeddict_expr(self)
 
 
@@ -1790,7 +1800,7 @@ class PromoteExpr(Expression):
     def __init__(self, type: 'mypy.types.Type') -> None:
         self.type = type
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit__promote_expr(self)
 
 
@@ -1805,7 +1815,7 @@ class NewTypeExpr(Expression):
         self.name = name
         self.old_type = old_type
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_newtype_expr(self)
 
 
@@ -1817,7 +1827,7 @@ class AwaitExpr(Expression):
     def __init__(self, expr: Expression) -> None:
         self.expr = expr
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_await_expr(self)
 
 
@@ -1840,7 +1850,7 @@ class TempNode(Expression):
     def __repr__(self) -> str:
         return 'TempNode(%s)' % str(self.type)
 
-    def accept(self, visitor: NodeVisitor[T]) -> T:
+    def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_temp_node(self)
 
 
diff --git a/mypy/options.py b/mypy/options.py
index a9b6a05..77f9713 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -27,6 +27,7 @@ class Options:
         "show_none_errors",
         "warn_no_return",
         "ignore_errors",
+        "strict_boolean",
     }
 
     OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional"}
@@ -70,6 +71,9 @@ class Options:
         # Files in which to ignore all non-fatal errors
         self.ignore_errors = False
 
+        # Only allow booleans in conditions
+        self.strict_boolean = False
+
         # Apply strict None checking
         self.strict_optional = False
 
@@ -110,7 +114,7 @@ class Options:
         self.use_builtins_fixtures = False
 
         # -- experimental options --
-        self.fast_parser = False
+        self.fast_parser = True
         self.incremental = False
         self.cache_dir = defaults.CACHE_DIR
         self.debug_cache = False
diff --git a/mypy/sametypes.py b/mypy/sametypes.py
index 4878273..e3cc561 100644
--- a/mypy/sametypes.py
+++ b/mypy/sametypes.py
@@ -123,9 +123,18 @@ class SameTypeVisitor(TypeVisitor[bool]):
             return False
 
     def visit_union_type(self, left: UnionType) -> bool:
-        # XXX This is a test for syntactic equality, not equivalence
         if isinstance(self.right, UnionType):
-            return is_same_types(left.items, self.right.items)
+            # Check that everything in left is in right
+            for left_item in left.items:
+                if not any(is_same_type(left_item, right_item) for right_item in self.right.items):
+                    return False
+
+            # Check that everything in right is in left
+            for right_item in self.right.items:
+                if not any(is_same_type(right_item, left_item) for left_item in left.items):
+                    return False
+
+            return True
         else:
             return False
 
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 1db35f0..71a8323 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -67,6 +67,7 @@ from mypy.nodes import (
     IntExpr, FloatExpr, UnicodeExpr, EllipsisExpr, TempNode,
     COVARIANT, CONTRAVARIANT, INVARIANT, UNBOUND_IMPORTED, LITERAL_YES,
 )
+from mypy.typevars import has_no_typevars, fill_typevars
 from mypy.visitor import NodeVisitor
 from mypy.traverser import TraverserVisitor
 from mypy.errors import Errors, report_internal_error
@@ -79,7 +80,6 @@ from mypy.nodes import implicit_module_attrs
 from mypy.typeanal import TypeAnalyser, TypeAnalyserPass3, analyze_type_alias
 from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError
 from mypy.sametypes import is_same_type
-from mypy.erasetype import erase_typevars
 from mypy.options import Options
 from mypy import join
 
@@ -1404,10 +1404,11 @@ class SemanticAnalyzer(NodeVisitor):
                 self.fail('Tuple type expected for multiple variables',
                           lvalue)
         elif isinstance(lvalue, StarExpr):
+            # Historical behavior for the old parser
             if isinstance(typ, StarType):
                 self.store_declared_types(lvalue.expr, typ.type)
             else:
-                self.fail('Star type expected for starred expression', lvalue)
+                self.store_declared_types(lvalue.expr, typ)
         else:
             # This has been flagged elsewhere as an error, so just ignore here.
             pass
@@ -2067,6 +2068,8 @@ class SemanticAnalyzer(NodeVisitor):
     def visit_assert_stmt(self, s: AssertStmt) -> None:
         if s.expr:
             s.expr.accept(self)
+        if s.msg:
+            s.msg.accept(self)
 
     def visit_operator_assignment_stmt(self,
                                        s: OperatorAssignmentStmt) -> None:
@@ -2087,7 +2090,11 @@ class SemanticAnalyzer(NodeVisitor):
         s.expr.accept(self)
 
         # Bind index variables and check if they define new names.
-        self.analyze_lvalue(s.index)
+        self.analyze_lvalue(s.index, explicit_type=s.index_type is not None)
+        if s.index_type:
+            allow_tuple_literal = isinstance(s.index, (TupleExpr, ListExpr))
+            s.index_type = self.anal_type(s.index_type, allow_tuple_literal)
+            self.store_declared_types(s.index, s.index_type)
 
         self.loop_depth += 1
         self.visit_block(s.body)
@@ -2130,10 +2137,48 @@ class SemanticAnalyzer(NodeVisitor):
             s.finally_body.accept(visitor)
 
     def visit_with_stmt(self, s: WithStmt) -> None:
+        types = []  # type: List[Type]
+
+        if s.target_type:
+            actual_targets = [t for t in s.target if t is not None]
+            if len(actual_targets) == 0:
+                # We have a type for no targets
+                self.fail('Invalid type comment', s)
+            elif len(actual_targets) == 1:
+                # We have one target and one type
+                types = [s.target_type]
+            elif isinstance(s.target_type, TupleType):
+                # We have multiple targets and multiple types
+                if len(actual_targets) == len(s.target_type.items):
+                    types = s.target_type.items
+                else:
+                    # But it's the wrong number of items
+                    self.fail('Incompatible number of types for `with` targets', s)
+            else:
+                # We have multiple targets and one type
+                self.fail('Multiple types expected for multiple `with` targets', s)
+
+        new_types = []  # type: List[Type]
         for e, n in zip(s.expr, s.target):
             e.accept(self)
             if n:
-                self.analyze_lvalue(n)
+                self.analyze_lvalue(n, explicit_type=s.target_type is not None)
+
+                # Since we have a target, pop the next type from types
+                if types:
+                    t = types.pop(0)
+                    allow_tuple_literal = isinstance(n, (TupleExpr, ListExpr))
+                    t = self.anal_type(t, allow_tuple_literal)
+                    new_types.append(t)
+                    self.store_declared_types(n, t)
+
+        # Reverse the logic above to correctly reassign target_type
+        if new_types:
+            if len(s.target) == 1:
+                s.target_type = new_types[0]
+            elif isinstance(s.target_type, TupleType):
+                s.target_type = s.target_type.copy_modified(items=new_types)
+
         self.visit_block(s.body)
 
     def visit_del_stmt(self, s: DelStmt) -> None:
@@ -2254,7 +2299,7 @@ class SemanticAnalyzer(NodeVisitor):
         """Analyze a call expression.
 
         Some call expressions are recognized as special forms, including
-        cast(...) and Any(...).
+        cast(...).
         """
         expr.callee.accept(self)
         if refers_to_fullname(expr.callee, 'typing.cast'):
@@ -2280,12 +2325,8 @@ class SemanticAnalyzer(NodeVisitor):
             expr.analyzed.column = expr.column
             expr.analyzed.accept(self)
         elif refers_to_fullname(expr.callee, 'typing.Any'):
-            # Special form Any(...).
-            if not self.check_fixed_args(expr, 1, 'Any'):
-                return
-            expr.analyzed = CastExpr(expr.args[0], AnyType())
-            expr.analyzed.line = expr.line
-            expr.analyzed.accept(self)
+            # Special form Any(...) no longer supported.
+            self.fail('Any(...) is no longer supported. Use cast(Any, ...) instead', expr)
         elif refers_to_fullname(expr.callee, 'typing._promote'):
             # Special form _promote(...).
             if not self.check_fixed_args(expr, 1, '_promote'):
@@ -2363,7 +2404,7 @@ class SemanticAnalyzer(NodeVisitor):
             # In this case base.node is the module's MypyFile and we look up
             # bar in its namespace.  This must be done for all types of bar.
             file = base.node
-            assert isinstance(file, MypyFile)
+            assert isinstance(file, (MypyFile, type(None)))
             n = file.names.get(expr.name, None) if file is not None else None
             if n:
                 n = self.normalize_type_alias(n, expr)
@@ -2828,7 +2869,17 @@ class FirstPass(NodeVisitor):
 
         # Add implicit definitions of module '__name__' etc.
         for name, t in implicit_module_attrs.items():
-            v = Var(name, UnboundType(t))
+            # unicode docstrings should be accepted in Python 2
+            if name == '__doc__':
+                if self.pyversion >= (3, 0):
+                    typ = UnboundType('__builtins__.str')  # type: Type
+                else:
+                    typ = UnionType([UnboundType('__builtins__.str'),
+                                     UnboundType('__builtins__.unicode')])
+            else:
+                assert t is not None, 'type should be specified for {}'.format(name)
+                typ = UnboundType(t)
+            v = Var(name, typ)
             v._fullname = self.sem.qualified_name(name)
             self.sem.globals[name] = SymbolTableNode(GDEF, v, self.sem.cur_mod_id)
 
@@ -2979,7 +3030,7 @@ class FirstPass(NodeVisitor):
 
     def visit_for_stmt(self, s: ForStmt) -> None:
         if self.sem.is_module_scope():
-            self.analyze_lvalue(s.index)
+            self.analyze_lvalue(s.index, explicit_type=s.index_type is not None)
             s.body.accept(self)
             if s.else_body:
                 s.else_body.accept(self)
@@ -2988,7 +3039,7 @@ class FirstPass(NodeVisitor):
         if self.sem.is_module_scope():
             for n in s.target:
                 if n:
-                    self.analyze_lvalue(n)
+                    self.analyze_lvalue(n, explicit_type=s.target_type is not None)
             s.body.accept(self)
 
     def visit_decorator(self, d: Decorator) -> None:
@@ -3153,19 +3204,6 @@ class ThirdPass(TraverserVisitor):
         return Instance(sym.node, args or [])
 
 
-def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]:
-    """For a non-generic type, return instance type representing the type.
-    For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn].
-    """
-    tv = []  # type: List[Type]
-    for i in range(len(typ.type_vars)):
-        tv.append(TypeVarType(typ.defn.type_vars[i]))
-    inst = Instance(typ, tv)
-    if typ.tuple_type is None:
-        return inst
-    return typ.tuple_type.copy_modified(fallback=inst)
-
-
 def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
     if isinstance(sig, CallableType):
         return sig.copy_modified(arg_types=[new] + sig.arg_types[1:])
@@ -3537,7 +3575,3 @@ def find_fixed_callable_return(expr: Expression) -> Optional[CallableType]:
             if isinstance(t.ret_type, CallableType):
                 return t.ret_type
     return None
-
-
-def has_no_typevars(typ: Type) -> bool:
-    return is_same_type(typ, erase_typevars(typ))
diff --git a/mypy/strconv.py b/mypy/strconv.py
index 68efaeb..d7c1e48 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -199,7 +199,10 @@ class StrConv(NodeVisitor[str]):
         a = []  # type: List[Any]
         if o.is_async:
             a.append(('Async', ''))
-        a.extend([o.index, o.expr, o.body])
+        a.append(o.index)
+        if o.index_type:
+            a.append(o.index_type)
+        a.extend([o.expr, o.body])
         if o.else_body:
             a.append(('Else', o.else_body.body))
         return self.dump(a, o)
@@ -231,7 +234,10 @@ class StrConv(NodeVisitor[str]):
         return self.dump([o.expr, o.from_expr], o)
 
     def visit_assert_stmt(self, o: 'mypy.nodes.AssertStmt') -> str:
-        return self.dump([o.expr], o)
+        if o.msg is not None:
+            return self.dump([o.expr, o.msg], o)
+        else:
+            return self.dump([o.expr], o)
 
     def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> str:
         return self.dump([o.expr], o)
@@ -263,6 +269,8 @@ class StrConv(NodeVisitor[str]):
             a.append(('Expr', [o.expr[i]]))
             if o.target[i]:
                 a.append(('Target', [o.target[i]]))
+        if o.target_type:
+            a.append(o.target_type)
         return self.dump(a + [o.body], o)
 
     def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> str:
diff --git a/typeshed/stdlib/2/xml/__init__.pyi b/mypy/test/__init__.py
similarity index 100%
rename from typeshed/stdlib/2/xml/__init__.pyi
rename to mypy/test/__init__.py
diff --git a/typeshed/stdlib/2/xml/etree/__init__.pyi b/mypy/test/collect.py
similarity index 100%
rename from typeshed/stdlib/2/xml/etree/__init__.pyi
rename to mypy/test/collect.py
diff --git a/mypy/test/config.py b/mypy/test/config.py
new file mode 100644
index 0000000..681f866
--- /dev/null
+++ b/mypy/test/config.py
@@ -0,0 +1,19 @@
+import os
+import os.path
+
+import typing
+
+
+this_file_dir = os.path.dirname(os.path.realpath(__file__))
+PREFIX = os.path.dirname(os.path.dirname(this_file_dir))
+
+# Location of test data files such as test case descriptions.
+test_data_prefix = os.path.join(PREFIX, 'test-data', 'unit')
+
+assert os.path.isdir(test_data_prefix), \
+    'Test data prefix ({}) not set correctly'.format(test_data_prefix)
+
+# Temp directory used for the temp files created when running test cases.
+# This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase.
+# It is also hard-coded in numerous places, so don't change it.
+test_temp_dir = 'tmp'
diff --git a/mypy/test/data.py b/mypy/test/data.py
new file mode 100644
index 0000000..8fa64a5
--- /dev/null
+++ b/mypy/test/data.py
@@ -0,0 +1,483 @@
+"""Utilities for processing .test files containing test case descriptions."""
+
+import os.path
+import os
+import re
+from os import remove, rmdir
+import shutil
+
+import pytest  # type: ignore  # no pytest in typeshed
+from typing import Callable, List, Tuple, Set, Optional, Iterator, Any
+
+from mypy.myunit import TestCase, SkipTestCaseException
+
+
+def parse_test_cases(
+        path: str,
+        perform: Optional[Callable[['DataDrivenTestCase'], None]],
+        base_path: str = '.',
+        optional_out: bool = False,
+        include_path: str = None,
+        native_sep: bool = False) -> List['DataDrivenTestCase']:
+    """Parse a file with test case descriptions.
+
+    Return an array of test cases.
+
+    NB this function and DataDrivenTestCase are shared between the
+    myunit and pytest codepaths -- if something looks redundant,
+    that's likely the reason.
+    """
+
+    if not include_path:
+        include_path = os.path.dirname(path)
+    with open(path, encoding='utf-8') as f:
+        l = f.readlines()
+    for i in range(len(l)):
+        l[i] = l[i].rstrip('\n')
+    p = parse_test_data(l, path)
+    out = []  # type: List[DataDrivenTestCase]
+
+    # Process the parsed items. Each item has a header of form [id args],
+    # optionally followed by lines of text.
+    i = 0
+    while i < len(p):
+        ok = False
+        i0 = i
+        if p[i].id == 'case':
+            i += 1
+
+            files = []  # type: List[Tuple[str, str]] # path and contents
+            output_files = []  # type: List[Tuple[str, str]] # path and contents for output files
+            tcout = []  # type: List[str]  # Regular output errors
+            tcout2 = []  # type: List[str]  # Output errors for incremental, second run
+            stale_modules = None  # type: Optional[Set[str]]  # module names
+            rechecked_modules = None  # type: Optional[Set[str]]  # module names
+            while i < len(p) and p[i].id != 'case':
+                if p[i].id == 'file' or p[i].id == 'outfile':
+                    # Record an extra file needed for the test case.
+                    arg = p[i].arg
+                    assert arg is not None
+                    file_entry = (os.path.join(base_path, arg), '\n'.join(p[i].data))
+                    if p[i].id == 'file':
+                        files.append(file_entry)
+                    elif p[i].id == 'outfile':
+                        output_files.append(file_entry)
+                elif p[i].id in ('builtins', 'builtins_py2'):
+                    # Use a custom source file for the std module.
+                    arg = p[i].arg
+                    assert arg is not None
+                    mpath = os.path.join(os.path.dirname(path), arg)
+                    if p[i].id == 'builtins':
+                        fnam = 'builtins.pyi'
+                    else:
+                        # Python 2
+                        fnam = '__builtin__.pyi'
+                    with open(mpath) as f:
+                        files.append((os.path.join(base_path, fnam), f.read()))
+                elif p[i].id == 'stale':
+                    arg = p[i].arg
+                    if arg is None:
+                        stale_modules = set()
+                    else:
+                        stale_modules = {item.strip() for item in arg.split(',')}
+                elif p[i].id == 'rechecked':
+                    arg = p[i].arg
+                    if arg is None:
+                        rechecked_modules = set()
+                    else:
+                        rechecked_modules = {item.strip() for item in arg.split(',')}
+                elif p[i].id == 'out' or p[i].id == 'out1':
+                    tcout = p[i].data
+                    if native_sep and os.path.sep == '\\':
+                        tcout = [fix_win_path(line) for line in tcout]
+                    ok = True
+                elif p[i].id == 'out2':
+                    tcout2 = p[i].data
+                    if native_sep and os.path.sep == '\\':
+                        tcout2 = [fix_win_path(line) for line in tcout2]
+                    ok = True
+                else:
+                    raise ValueError(
+                        'Invalid section header {} in {} at line {}'.format(
+                            p[i].id, path, p[i].line))
+                i += 1
+
+            if rechecked_modules is None:
+                # If the set of rechecked modules isn't specified, make it the same as the set of
+                # modules with a stale public interface.
+                rechecked_modules = stale_modules
+            if (stale_modules is not None
+                    and rechecked_modules is not None
+                    and not stale_modules.issubset(rechecked_modules)):
+                raise ValueError(
+                    'Stale modules must be a subset of rechecked modules ({})'.format(path))
+
+            if optional_out:
+                ok = True
+
+            if ok:
+                input = expand_includes(p[i0].data, include_path)
+                expand_errors(input, tcout, 'main')
+                lastline = p[i].line if i < len(p) else p[i - 1].line + 9999
+                tc = DataDrivenTestCase(p[i0].arg, input, tcout, tcout2, path,
+                                        p[i0].line, lastline, perform,
+                                        files, output_files, stale_modules,
+                                        rechecked_modules)
+                out.append(tc)
+        if not ok:
+            raise ValueError(
+                '{}, line {}: Error in test case description'.format(
+                    path, p[i0].line))
+
+    return out
+
+
+class DataDrivenTestCase(TestCase):
+    input = None  # type: List[str]
+    output = None  # type: List[str]
+
+    file = ''
+    line = 0
+
+    # (file path, file content) tuples
+    files = None  # type: List[Tuple[str, str]]
+    expected_stale_modules = None  # type: Optional[Set[str]]
+
+    clean_up = None  # type: List[Tuple[bool, str]]
+
+    def __init__(self,
+                 name: str,
+                 input: List[str],
+                 output: List[str],
+                 output2: List[str],
+                 file: str,
+                 line: int,
+                 lastline: int,
+                 perform: Callable[['DataDrivenTestCase'], None],
+                 files: List[Tuple[str, str]],
+                 output_files: List[Tuple[str, str]],
+                 expected_stale_modules: Optional[Set[str]],
+                 expected_rechecked_modules: Optional[Set[str]],
+                 ) -> None:
+        super().__init__(name)
+        self.input = input
+        self.output = output
+        self.output2 = output2
+        self.lastline = lastline
+        self.file = file
+        self.line = line
+        self.perform = perform
+        self.files = files
+        self.output_files = output_files
+        self.expected_stale_modules = expected_stale_modules
+        self.expected_rechecked_modules = expected_rechecked_modules
+
+    def set_up(self) -> None:
+        super().set_up()
+        encountered_files = set()
+        self.clean_up = []
+        for path, content in self.files:
+            dir = os.path.dirname(path)
+            for d in self.add_dirs(dir):
+                self.clean_up.append((True, d))
+            with open(path, 'w') as f:
+                f.write(content)
+            self.clean_up.append((False, path))
+            encountered_files.add(path)
+            if path.endswith(".next"):
+                # Make sure new files introduced in the second run are accounted for
+                renamed_path = path[:-5]
+                if renamed_path not in encountered_files:
+                    encountered_files.add(renamed_path)
+                    self.clean_up.append((False, renamed_path))
+        for path, _ in self.output_files:
+            # Create directories for expected output and mark them to be cleaned up at the end
+            # of the test case.
+            dir = os.path.dirname(path)
+            for d in self.add_dirs(dir):
+                self.clean_up.append((True, d))
+            self.clean_up.append((False, path))
+
+    def add_dirs(self, dir: str) -> List[str]:
+        """Add all subdirectories required to create dir.
+
+        Return an array of the created directories in the order of creation.
+        """
+        if dir == '' or os.path.isdir(dir):
+            return []
+        else:
+            dirs = self.add_dirs(os.path.dirname(dir)) + [dir]
+            os.mkdir(dir)
+            return dirs
+
+    def run(self) -> None:
+        if self.name.endswith('-skip'):
+            raise SkipTestCaseException()
+        else:
+            self.perform(self)
+
+    def tear_down(self) -> None:
+        # First remove files.
+        for is_dir, path in reversed(self.clean_up):
+            if not is_dir:
+                remove(path)
+        # Then remove directories.
+        for is_dir, path in reversed(self.clean_up):
+            if is_dir:
+                pycache = os.path.join(path, '__pycache__')
+                if os.path.isdir(pycache):
+                    shutil.rmtree(pycache)
+                try:
+                    rmdir(path)
+                except OSError as error:
+                    print(' ** Error removing directory %s -- contents:' % path)
+                    for item in os.listdir(path):
+                        print('  ', item)
+                    # Most likely, there are some files in the
+                    # directory. Use rmtree to nuke the directory, but
+                    # fail the test case anyway, since this seems like
+                    # a bug in a test case -- we shouldn't leave
+                    # garbage lying around. By nuking the directory,
+                    # the next test run hopefully passes.
+                    path = error.filename
+                    # Be defensive -- only call rmtree if we're sure we aren't removing anything
+                    # valuable.
+                    if path.startswith('tmp/') and os.path.isdir(path):
+                        shutil.rmtree(path)
+                    raise
+        super().tear_down()
+
+
+class TestItem:
+    """Parsed test case item.
+
+    An item is of the form
+      [id arg]
+      .. data ..
+    """
+
+    id = ''
+    arg = ''  # type: Optional[str]
+
+    # Text data, array of 8-bit strings
+    data = None  # type: List[str]
+
+    file = ''
+    line = 0  # Line number in file
+
+    def __init__(self, id: str, arg: Optional[str], data: List[str], file: str,
+                 line: int) -> None:
+        self.id = id
+        self.arg = arg
+        self.data = data
+        self.file = file
+        self.line = line
+
+
+def parse_test_data(l: List[str], fnam: str) -> List[TestItem]:
+    """Parse a list of lines that represent a sequence of test items."""
+
+    ret = []  # type: List[TestItem]
+    data = []  # type: List[str]
+
+    id = None  # type: Optional[str]
+    arg = None  # type: Optional[str]
+
+    i = 0
+    i0 = 0
+    while i < len(l):
+        s = l[i].strip()
+
+        if l[i].startswith('[') and s.endswith(']') and not s.startswith('[['):
+            if id:
+                data = collapse_line_continuation(data)
+                data = strip_list(data)
+                ret.append(TestItem(id, arg, strip_list(data), fnam, i0 + 1))
+            i0 = i
+            id = s[1:-1]
+            arg = None
+            if ' ' in id:
+                arg = id[id.index(' ') + 1:]
+                id = id[:id.index(' ')]
+            data = []
+        elif l[i].startswith('[['):
+            data.append(l[i][1:])
+        elif not l[i].startswith('--'):
+            data.append(l[i])
+        elif l[i].startswith('----'):
+            data.append(l[i][2:])
+        i += 1
+
+    # Process the last item.
+    if id:
+        data = collapse_line_continuation(data)
+        data = strip_list(data)
+        ret.append(TestItem(id, arg, data, fnam, i0 + 1))
+
+    return ret
+
+
+def strip_list(l: List[str]) -> List[str]:
+    """Return a stripped copy of l.
+
+    Strip whitespace at the end of all lines, and strip all empty
+    lines from the end of the array.
+    """
+
+    r = []  # type: List[str]
+    for s in l:
+        # Strip spaces at end of line
+        r.append(re.sub(r'\s+$', '', s))
+
+    while len(r) > 0 and r[-1] == '':
+        r.pop()
+
+    return r
+
+
+def collapse_line_continuation(l: List[str]) -> List[str]:
+    r = []  # type: List[str]
+    cont = False
+    for s in l:
+        ss = re.sub(r'\\$', '', s)
+        if cont:
+            r[-1] += re.sub('^ +', '', ss)
+        else:
+            r.append(ss)
+        cont = s.endswith('\\')
+    return r
+
+
+def expand_includes(a: List[str], base_path: str) -> List[str]:
+    """Expand @includes within a list of lines.
+
+    Replace all lines starting with @include with the contents of the
+    file name following the prefix. Look for the files in base_path.
+    """
+
+    res = []  # type: List[str]
+    for s in a:
+        if s.startswith('@include '):
+            fn = s.split(' ', 1)[1].strip()
+            with open(os.path.join(base_path, fn)) as f:
+                res.extend(f.readlines())
+        else:
+            res.append(s)
+    return res
+
+
+def expand_errors(input: List[str], output: List[str], fnam: str) -> None:
+    """Transform comments such as '# E: message' or
+    '# E:3: message' in input.
+
+    The result is lines like 'fnam:line: error: message'.
+    """
+
+    for i in range(len(input)):
+        # The first in the split things isn't a comment
+        for possible_err_comment in input[i].split('#')[1:]:
+            m = re.search(
+                '^([ENW]):((?P<col>\d+):)? (?P<message>.*)$',
+                possible_err_comment.strip())
+            if m:
+                if m.group(1) == 'E':
+                    severity = 'error'
+                elif m.group(1) == 'N':
+                    severity = 'note'
+                elif m.group(1) == 'W':
+                    severity = 'warning'
+                col = m.group('col')
+                if col is None:
+                    output.append(
+                        '{}:{}: {}: {}'.format(fnam, i + 1, severity, m.group('message')))
+                else:
+                    output.append('{}:{}:{}: {}: {}'.format(
+                        fnam, i + 1, col, severity, m.group('message')))
+
+
+def fix_win_path(line: str) -> str:
+    r"""Changes paths to Windows paths in error messages.
+
+    E.g. foo/bar.py -> foo\bar.py.
+    """
+    m = re.match(r'^([\S/]+):(\d+:)?(\s+.*)', line)
+    if not m:
+        return line
+    else:
+        filename, lineno, message = m.groups()
+        return '{}:{}{}'.format(filename.replace('/', '\\'),
+                                lineno or '', message)
+
+
+##
+#
+# pytest setup
+#
+##
+
+
+def pytest_addoption(parser: Any) -> None:
+    group = parser.getgroup('mypy')
+    group.addoption('--update-data', action='store_true', default=False,
+                    help='Update test data to reflect actual output'
+                         ' (supported only for certain tests)')
+
+
+def pytest_pycollect_makeitem(collector: Any, name: str, obj: Any) -> Any:
+    if not isinstance(obj, type) or not issubclass(obj, DataSuite):
+        return None
+    return MypyDataSuite(name, parent=collector)
+
+
+class MypyDataSuite(pytest.Class):  # type: ignore  # inheriting from Any
+    def collect(self) -> Iterator['MypyDataCase']:
+        for case in self.obj.cases():
+            yield MypyDataCase(case.name, self, case)
+
+
+class MypyDataCase(pytest.Item):  # type: ignore  # inheriting from Any
+    def __init__(self, name: str, parent: MypyDataSuite, obj: DataDrivenTestCase) -> None:
+        self.skip = False
+        if name.endswith('-skip'):
+            self.skip = True
+            name = name[:-len('-skip')]
+
+        super().__init__(name, parent)
+        self.obj = obj
+
+    def runtest(self) -> None:
+        if self.skip:
+            pytest.skip()
+        update_data = self.config.getoption('--update-data', False)
+        self.parent.obj(update_data=update_data).run_case(self.obj)
+
+    def setup(self) -> None:
+        self.obj.set_up()
+
+    def teardown(self) -> None:
+        self.obj.tear_down()
+
+    def reportinfo(self) -> Tuple[str, int, str]:
+        return self.obj.file, self.obj.line, self.obj.name
+
+    def repr_failure(self, excinfo: Any) -> str:
+        if excinfo.errisinstance(SystemExit):
+            # We assume that before doing exit() (which raises SystemExit) we've printed
+            # enough context about what happened so that a stack trace is not useful.
+            # In particular, uncaught exceptions during semantic analysis or type checking
+            # call exit() and they already print out a stack trace.
+            excrepr = excinfo.exconly()
+        else:
+            self.parent._prunetraceback(excinfo)
+            excrepr = excinfo.getrepr(style='short')
+
+        return "data: {}:{}:\n{}".format(self.obj.file, self.obj.line, excrepr)
+
+
+class DataSuite:
+    @classmethod
+    def cases(cls) -> List[DataDrivenTestCase]:
+        return []
+
+    def run_case(self, testcase: DataDrivenTestCase) -> None:
+        raise NotImplementedError
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
new file mode 100644
index 0000000..32d889b
--- /dev/null
+++ b/mypy/test/helpers.py
@@ -0,0 +1,285 @@
+import sys
+import re
+import os
+
+from typing import List, Dict, Tuple
+
+from mypy import defaults
+from mypy.myunit import AssertionFailure
+from mypy.test.data import DataDrivenTestCase
+
+
# assert_string_arrays_equal displays special line alignment helper messages if
# the first different line has at least this many characters.
MIN_LINE_LENGTH_FOR_ALIGNMENT = 5
+
+
def assert_string_arrays_equal(expected: List[str], actual: List[str],
                               msg: str) -> None:
    """Assert that two string arrays are equal.

    Display any differences in a human-readable form.

    The actual output is passed through clean_up() first. On a mismatch,
    an expected/actual diff (with long identical runs at both ends elided)
    is written to stderr and AssertionFailure(msg) is raised.
    """

    actual = clean_up(actual)

    if actual != expected:
        num_skip_start = num_skipped_prefix_lines(expected, actual)
        num_skip_end = num_skipped_suffix_lines(expected, actual)

        sys.stderr.write('Expected:\n')

        # If we omit some lines at the beginning, indicate it by displaying
        # a line with '...'.
        if num_skip_start > 0:
            sys.stderr.write('  ...\n')

        # Keep track of the first different line.
        first_diff = -1

        # Display only this many first characters of identical lines.
        width = 75

        for i in range(num_skip_start, len(expected) - num_skip_end):
            if i >= len(actual) or expected[i] != actual[i]:
                if first_diff < 0:
                    first_diff = i
                sys.stderr.write('  {:<45} (diff)'.format(expected[i]))
            else:
                # Identical line: print it truncated to the display width.
                e = expected[i]
                sys.stderr.write('  ' + e[:width])
                if len(e) > width:
                    sys.stderr.write('...')
            sys.stderr.write('\n')
        if num_skip_end > 0:
            sys.stderr.write('  ...\n')

        sys.stderr.write('Actual:\n')

        if num_skip_start > 0:
            sys.stderr.write('  ...\n')

        for j in range(num_skip_start, len(actual) - num_skip_end):
            if j >= len(expected) or expected[j] != actual[j]:
                sys.stderr.write('  {:<45} (diff)'.format(actual[j]))
            else:
                a = actual[j]
                sys.stderr.write('  ' + a[:width])
                if len(a) > width:
                    sys.stderr.write('...')
            sys.stderr.write('\n')
        if actual == []:
            sys.stderr.write('  (empty)\n')
        if num_skip_end > 0:
            sys.stderr.write('  ...\n')

        sys.stderr.write('\n')

        # If the first mismatching pair of lines is long enough, also show a
        # column-aligned view that points at the first differing character.
        if first_diff >= 0 and first_diff < len(actual) and (
                len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
                or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT):
            # Display message that helps visualize the differences between two
            # long lines.
            show_align_message(expected[first_diff], actual[first_diff])

        raise AssertionFailure(msg)
+
+
def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None:
    """Rewrite a test case's expected output in its .test file.

    Each old expected message in testcase.output is replaced by the
    corresponding new message from `output`, but only when the replacement
    is unambiguous (the old message occurs in the test text exactly as many
    times as it was reported).
    """
    # Hoisted out of the loop below; it used to be re-imported per iteration.
    from itertools import chain

    testcase_path = os.path.join(testcase.old_cwd, testcase.file)
    with open(testcase_path) as f:
        data_lines = f.read().splitlines()
    test = '\n'.join(data_lines[testcase.line:testcase.lastline])

    # Map each old message to the list of new messages that should replace
    # its successive occurrences.
    mapping = {}  # type: Dict[str, List[str]]
    for old, new in zip(testcase.output, output):
        PREFIX = 'error:'
        ind = old.find(PREFIX)
        if ind != -1 and old[:ind] == new[:ind]:
            # The 'file:line: ' prefixes agree, so only the text after
            # 'error:' needs to be swapped.
            old, new = old[ind + len(PREFIX):], new[ind + len(PREFIX):]
        mapping.setdefault(old, []).append(new)

    for old in mapping:
        if test.count(old) == len(mapping[old]):
            betweens = test.split(old)

            # Interleave betweens and mapping[old]
            interleaved = [betweens[0]] + \
                list(chain.from_iterable(zip(mapping[old], betweens[1:])))
            test = ''.join(interleaved)

    data_lines[testcase.line:testcase.lastline] = [test]
    data = '\n'.join(data_lines)
    with open(testcase_path, 'w') as f:
        print(data, file=f)
+
+
def show_align_message(s1: str, s2: str) -> None:
    """Align s1 and s2 so that their first difference is highlighted.

    For example, if s1 is 'foobar' and s2 is 'fobar', display the
    following lines:

      E: foobar
      A: fobar
           ^

    If s1 and s2 are long, only display a fragment of the strings around the
    first difference. If s1 is very short, do nothing.
    """

    # Seeing what went wrong is trivial even without alignment if the expected
    # string is very short. In this case do nothing to simplify output.
    if len(s1) < 4:
        return

    if s1 == s2:
        # Nothing to highlight -- and the truncation loop below would never
        # terminate on identical strings, since it only stops once the first
        # 30 characters differ.
        return

    maxw = 72  # Maximum number of characters shown

    sys.stderr.write('Alignment of first line difference:\n')

    # Skip a long common prefix, 10 characters at a time, so that the first
    # difference lands inside the displayed window.
    trunc = False
    while s1[:30] == s2[:30]:
        s1 = s1[10:]
        s2 = s2[10:]
        trunc = True

    if trunc:
        s1 = '...' + s1
        s2 = '...' + s2

    max_len = max(len(s1), len(s2))
    extra = ''
    if max_len > maxw:
        extra = '...'

    # Write a chunk of both lines, aligned.
    sys.stderr.write('  E: {}{}\n'.format(s1[:maxw], extra))
    sys.stderr.write('  A: {}{}\n'.format(s2[:maxw], extra))
    # Write an indicator character under the different columns.
    sys.stderr.write('     ')
    for j in range(min(maxw, max(len(s1), len(s2)))):
        if s1[j:j + 1] != s2[j:j + 1]:
            sys.stderr.write('^')  # Difference
            break
        else:
            sys.stderr.write(' ')  # Equal
    sys.stderr.write('\n')
+
+
def assert_string_arrays_equal_wildcards(expected: List[str],
                                         actual: List[str],
                                         msg: str) -> None:
    """Like assert_string_arrays_equal, but a line consisting solely of
    '...' in expected matches any number of lines in actual.
    """
    cleaned = clean_up(actual)

    # Drop trailing empty lines from the actual output.
    while cleaned and cleaned[-1] == '':
        cleaned.pop()

    # Expand the '...' wildcards away, then compare exactly.
    assert_string_arrays_equal(match_array(expected, cleaned), cleaned, msg)
+
+
def clean_up(a: List[str]) -> List[str]:
    """Remove common directory prefix from all strings in a.

    This uses a naive string replace; it seems to work well enough. Also
    remove trailing carriage returns.

    NOTE(review): with ``prefix = os.sep``, each candidate p is always
    either '/' or the platform separator, and the condition below filters
    out exactly those values -- so the prefix-stripping replace appears to
    be dead code and only the trailing-space/CR cleanup takes effect.
    Possibly prefix was meant to include the temp-dir path; confirm
    against callers.
    """
    res = []
    for s in a:
        prefix = os.sep
        ss = s
        for p in prefix, prefix.replace(os.sep, '/'):
            if p != '/' and p != '//' and p != '\\' and p != '\\\\':
                ss = ss.replace(p, '')
        # Ignore spaces at end of line.
        ss = re.sub(' +$', '', ss)
        res.append(re.sub('\\r$', '', ss))
    return res
+
+
def match_array(pattern: List[str], target: List[str]) -> List[str]:
    """Expand '...' wildcards in pattern by matching against target.

    Returns the pattern with each matched wildcard replaced by the target
    lines it covers. Where matching fails, the remaining pattern lines are
    kept unchanged so that the subsequent exact comparison reports a diff.
    """
    result = []  # type: List[str]
    pi = 0  # current index into pattern
    ti = 0  # current index into target

    while pi < len(pattern):
        line = pattern[pi]
        if line == '...':
            if pi + 1 == len(pattern):
                # Trailing wildcard: it swallows the rest of target.
                result.extend(target[ti:])
                break
            # An inner wildcard is anchored by the next pattern line; find
            # where that anchor occurs in target.
            scan = ti
            while scan < len(target) and target[scan] != pattern[pi + 1]:
                scan += 1
            if scan == len(target):
                # The anchor never occurs; give up and keep the pattern.
                result.extend(pattern[pi:])
                break
            result.extend(target[ti:scan])
            pi += 1
            ti = scan
        elif ti < len(target) and (line == target[ti]
                                   or (pi + 1 < len(pattern)
                                       and ti + 1 < len(target)
                                       and pattern[pi + 1] == target[ti + 1])):
            # In sync; advance one line. The condition keeps sync also when
            # only a single line differs, but loses it when two consecutive
            # lines fail to match.
            result.append(line)
            pi += 1
            ti += 1
        else:
            # Out of sync; keep the remaining pattern unchanged.
            result.extend(pattern[pi:])
            break
    return result
+
+
def num_skipped_prefix_lines(a1: List[str], a2: List[str]) -> int:
    """Number of identical leading lines to elide, keeping 4 for context."""
    common = 0
    for x, y in zip(a1, a2):
        if x != y:
            break
        common += 1
    return max(0, common - 4)
+
+
def num_skipped_suffix_lines(a1: List[str], a2: List[str]) -> int:
    """Number of identical trailing lines to elide, keeping 4 for context."""
    common = 0
    for x, y in zip(reversed(a1), reversed(a2)):
        if x != y:
            break
        common += 1
    return max(0, common - 4)
+
+
def testfile_pyversion(path: str) -> Tuple[int, int]:
    """Return the Python version a test file targets, based on its name."""
    return (defaults.PYTHON2_VERSION
            if path.endswith('python2.test')
            else defaults.PYTHON3_VERSION)
+
+
def testcase_pyversion(path: str, testcase_name: str) -> Tuple[int, int]:
    """Return the Python version for a test case.

    A 'python2' suffix on the case name wins over the file-level default.
    """
    if testcase_name.endswith('python2'):
        return defaults.PYTHON2_VERSION
    return testfile_pyversion(path)
+
+
def normalize_error_messages(messages: List[str]) -> List[str]:
    """Translate an array of error messages to use / as path separator."""
    # Comprehension instead of a manual append loop.
    return [m.replace(os.sep, '/') for m in messages]
diff --git a/mypy/test/testargs.py b/mypy/test/testargs.py
new file mode 100644
index 0000000..4e27e37
--- /dev/null
+++ b/mypy/test/testargs.py
@@ -0,0 +1,18 @@
+"""Ensure the argparse parser and Options class are in sync.
+
+In particular, verify that the argparse defaults are the same as the Options
+defaults, and that argparse doesn't assign any new members to the Options
+object it creates.
+"""
+
+import typing
+from mypy.myunit import Suite, assert_equal
+from mypy.options import Options, BuildType
+from mypy.main import process_options
+
+
class ArgSuite(Suite):
    """Check that argparse stays in sync with the Options class."""

    def test_coherence(self) -> None:
        # Options built with no arguments must equal the Options produced
        # by parsing an empty command line.
        default_options = Options()
        _, options_from_args = process_options([], require_targets=False)
        assert_equal(default_options, options_from_args)
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
new file mode 100644
index 0000000..6f3b156
--- /dev/null
+++ b/mypy/test/testcheck.py
@@ -0,0 +1,332 @@
+"""Type checker test cases"""
+
+import os.path
+import re
+import shutil
+import sys
+import time
+import typed_ast
+import typed_ast.ast35
+
+from typing import Dict, List, Optional, Set, Tuple
+
+from mypy import build, defaults
+from mypy.main import parse_version, process_options
+from mypy.build import BuildSource, find_module_clear_caches
+from mypy.myunit import AssertionFailure
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, normalize_error_messages,
+    testcase_pyversion, update_testcase_output,
+)
+from mypy.errors import CompileError
+from mypy.options import Options
+
+from mypy import experiments
+
# List of files that contain test case descriptions.
files = [
]
fast_parser_files = [
    'check-basic.test',
    'check-callable.test',
    'check-classes.test',
    'check-statements.test',
    'check-generics.test',
    'check-dynamic-typing.test',
    'check-inference.test',
    'check-inference-context.test',
    'check-kwargs.test',
    'check-overloading.test',
    'check-type-checks.test',
    'check-abstract.test',
    'check-multiple-inheritance.test',
    'check-super.test',
    'check-modules.test',
    'check-typevar-values.test',
    'check-unsupported.test',
    'check-unreachable-code.test',
    'check-unions.test',
    'check-isinstance.test',
    'check-lists.test',
    'check-namedtuple.test',
    'check-typeddict.test',
    'check-type-aliases.test',
    'check-ignore.test',
    'check-type-promotion.test',
    'check-semanal-error.test',
    'check-flags.test',
    'check-incremental.test',
    'check-bound.test',
    'check-optional.test',
    'check-fastparse.test',
    'check-warnings.test',
    'check-async-await.test',
    'check-newtype.test',
    'check-class-namedtuple.test',
    'check-selftype.test',
    'check-python2.test',
    'check-columns.test',
    'check-functions.test',
    'check-tuples.test',
    'check-expressions.test',
    'check-generic-subtyping.test',
    'check-varargs.test',
]

# Feature-gated test files must be registered before files.extend() below;
# previously they were appended to fast_parser_files after the extend, so
# they were never added to `files` and thus never ran.
if 'annotation' in typed_ast.ast35.Assign._fields:
    fast_parser_files.append('check-newsyntax.test')

if 'contains_underscores' in typed_ast.ast35.Num._fields:
    fast_parser_files.append('check-underscores.test')

files.extend(fast_parser_files)
+
+
class TypeCheckSuite(DataSuite):
    """Data-driven type checker test suite.

    Loads cases from the .test files listed in ``files`` and runs the full
    build/type-check pipeline on each one, including incremental-mode and
    strict-optional variants.
    """

    def __init__(self, *, update_data: bool = False) -> None:
        # When True, failing cases rewrite their expected output in place.
        self.update_data = update_data

    @classmethod
    def cases(cls) -> List[DataDrivenTestCase]:
        """Parse and return all test cases from the registered data files."""
        c = []  # type: List[DataDrivenTestCase]
        for f in files:
            c += parse_test_cases(os.path.join(test_data_prefix, f),
                                  None, test_temp_dir, True)
        return c

    def run_case(self, testcase: DataDrivenTestCase) -> None:
        """Run one case, dispatching on incremental/strict-optional modes."""
        incremental = 'incremental' in testcase.name.lower() or 'incremental' in testcase.file
        optional = 'optional' in testcase.file
        if incremental:
            # Incremental tests are run once with a cold cache, once with a warm cache.
            # Expect success on first run, errors from testcase.output (if any) on second run.
            # Run 2 bumps file mtimes explicitly (see run_case_once) so that
            # timestamps are distinct even on coarse-resolution filesystems.
            self.clear_cache()
            self.run_case_once(testcase, 1)
            self.run_case_once(testcase, 2)
        elif optional:
            try:
                experiments.STRICT_OPTIONAL = True
                self.run_case_once(testcase)
            finally:
                # Restore the global flag so later cases are unaffected.
                experiments.STRICT_OPTIONAL = False
        else:
            try:
                old_strict_optional = experiments.STRICT_OPTIONAL
                self.run_case_once(testcase)
            finally:
                experiments.STRICT_OPTIONAL = old_strict_optional

    def clear_cache(self) -> None:
        """Delete the incremental cache directory, if present."""
        dn = defaults.CACHE_DIR

        if os.path.exists(dn):
            shutil.rmtree(dn)

    def run_case_once(self, testcase: DataDrivenTestCase, incremental: int = 0) -> None:
        """Run one build of a test case.

        incremental: 0 for a normal one-shot run, 1 for the cold-cache run
        and 2 for the warm-cache run of an incremental test.
        """
        find_module_clear_caches()
        original_program_text = '\n'.join(testcase.input)
        module_data = self.parse_module(original_program_text, incremental)

        if incremental:
            if incremental == 1:
                # In run 1, copy program text to program file.
                for module_name, program_path, program_text in module_data:
                    if module_name == '__main__':
                        with open(program_path, 'w') as f:
                            f.write(program_text)
                        break
            elif incremental == 2:
                # In run 2, copy *.next files to * files.
                # NOTE(review): 'files' here shadows the module-level list of
                # test data files.
                for dn, dirs, files in os.walk(os.curdir):
                    for file in files:
                        if file.endswith('.next'):
                            full = os.path.join(dn, file)
                            target = full[:-5]
                            shutil.copy(full, target)

                            # In some systems, mtime has a resolution of 1 second which can cause
                            # annoying-to-debug issues when a file has the same size after a
                            # change. We manually set the mtime to circumvent this.
                            new_time = os.stat(target).st_mtime + 1
                            os.utime(target, times=(new_time, new_time))

        # Parse options after moving files (in case mypy.ini is being moved).
        options = self.parse_options(original_program_text, testcase)
        options.use_builtins_fixtures = True
        options.show_traceback = True
        if 'optional' in testcase.file:
            options.strict_optional = True
        if incremental:
            options.incremental = True
        if os.path.split(testcase.file)[1] in fast_parser_files:
            options.fast_parser = True

        sources = []
        for module_name, program_path, program_text in module_data:
            # Always set to none so we're forced to reread the module in incremental mode
            sources.append(BuildSource(program_path, module_name,
                                       None if incremental else program_text))
        res = None
        try:
            res = build.build(sources=sources,
                              options=options,
                              alt_lib_path=test_temp_dir)
            a = res.errors
        except CompileError as e:
            a = e.messages
        a = normalize_error_messages(a)

        # Make sure error messages match
        if incremental == 0:
            msg = 'Invalid type checker output ({}, line {})'
            output = testcase.output
        elif incremental == 1:
            msg = 'Invalid type checker output in incremental, run 1 ({}, line {})'
            output = testcase.output
        elif incremental == 2:
            msg = 'Invalid type checker output in incremental, run 2 ({}, line {})'
            output = testcase.output2
        else:
            raise AssertionError()

        if output != a and self.update_data:
            update_testcase_output(testcase, a)
        assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line))

        if incremental and res:
            if options.follow_imports == 'normal' and testcase.output is None:
                self.verify_cache(module_data, a, res.manager)
            if incremental == 2:
                self.check_module_equivalence(
                    'rechecked',
                    testcase.expected_rechecked_modules,
                    res.manager.rechecked_modules)
                self.check_module_equivalence(
                    'stale',
                    testcase.expected_stale_modules,
                    res.manager.stale_modules)

    def check_module_equivalence(self, name: str,
                                 expected: Optional[Set[str]], actual: Set[str]) -> None:
        """Compare a recorded module set against the expected one (if given)."""
        if expected is not None:
            assert_string_arrays_equal(
                list(sorted(expected)),
                list(sorted(actual.difference({"__main__"}))),
                'Set of {} modules does not match expected set'.format(name))

    def verify_cache(self, module_data: List[Tuple[str, str, str]], a: List[str],
                     manager: build.BuildManager) -> None:
        """Check that cache metadata is missing exactly for error modules."""
        # There should be valid cache metadata for each module except
        # those in error_paths; for those there should not be.
        #
        # NOTE: When A imports B and there's an error in B, the cache
        # data for B is invalidated, but the cache data for A remains.
        # However build.process_graphs() will ignore A's cache data.
        #
        # Also note that when A imports B, and there's an error in A
        # _due to a valid change in B_, the cache data for B will be
        # invalidated and updated, but the old cache data for A will
        # remain unchanged. As before, build.process_graphs() will
        # ignore A's (old) cache data.
        error_paths = self.find_error_paths(a)
        modules = self.find_module_files()
        modules.update({module_name: path for module_name, path, text in module_data})
        missing_paths = self.find_missing_cache_files(modules, manager)
        if not missing_paths.issubset(error_paths):
            raise AssertionFailure("cache data discrepancy %s != %s" %
                                   (missing_paths, error_paths))

    def find_error_paths(self, a: List[str]) -> Set[str]:
        """Extract the set of file paths that produced error messages."""
        hits = set()
        for line in a:
            m = re.match(r'([^\s:]+):\d+: error:', line)
            if m:
                p = m.group(1).replace('/', os.path.sep)
                hits.add(p)
        return hits

    def find_module_files(self) -> Dict[str, str]:
        """Map module ids to file paths for all .py files in the temp dir."""
        modules = {}
        for dn, dirs, files in os.walk(test_temp_dir):
            dnparts = dn.split(os.sep)
            assert dnparts[0] == test_temp_dir
            del dnparts[0]
            for file in files:
                if file.endswith('.py'):
                    if file == "__init__.py":
                        # If the file path is `a/b/__init__.py`, exclude the file name
                        # and make sure the module id is just `a.b`, not `a.b.__init__`.
                        id = '.'.join(dnparts)
                    else:
                        base, ext = os.path.splitext(file)
                        id = '.'.join(dnparts + [base])
                    modules[id] = os.path.join(dn, file)
        return modules

    def find_missing_cache_files(self, modules: Dict[str, str],
                                 manager: build.BuildManager) -> Set[str]:
        """Return paths of modules whose cache metadata is missing or stale."""
        missing = {}
        for id, path in modules.items():
            meta = build.find_cache_meta(id, path, manager)
            if not build.is_meta_fresh(meta, id, path, manager):
                missing[id] = path
        return set(missing.values())

    def parse_module(self, program_text: str, incremental: int = 0) -> List[Tuple[str, str, str]]:
        """Return the module and program names for a test case.

        Normally, the unit tests will parse the default ('__main__')
        module and follow all the imports listed there. You can override
        this behavior and instruct the tests to check multiple modules
        by using a comment like this in the test case input:

          # cmd: mypy -m foo.bar foo.baz

        Return a list of tuples (module name, file name, program text).
        """
        m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
        m2 = re.search('# cmd2: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
        if m2 is not None and incremental == 2:
            # Optionally return a different command if in the second
            # stage of incremental mode, otherwise default to reusing
            # the original cmd.
            m = m2

        if m:
            # The test case wants to use a non-default main
            # module. Look up the module and give it as the thing to
            # analyze.
            module_names = m.group(1)
            out = []
            for module_name in module_names.split(' '):
                path = build.find_module(module_name, [test_temp_dir])
                with open(path) as f:
                    program_text = f.read()
                out.append((module_name, path, program_text))
            return out
        else:
            return [('__main__', 'main', program_text)]

    def parse_options(self, program_text: str, testcase: DataDrivenTestCase) -> Options:
        """Build an Options object from a '# flags: ...' comment, if any."""
        options = Options()
        flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)

        flag_list = None
        if flags:
            flag_list = flags.group(1).split()
            targets, options = process_options(flag_list, require_targets=False)
            if targets:
                # TODO: support specifying targets via the flags pragma
                raise RuntimeError('Specifying targets via the flags pragma is not supported.')
        else:
            options = Options()

        # Allow custom python version to override testcase_pyversion
        if (not flag_list or
                all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
            options.python_version = testcase_pyversion(testcase.file, testcase.name)

        return options
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
new file mode 100644
index 0000000..f2ddc9b
--- /dev/null
+++ b/mypy/test/testcmdline.py
@@ -0,0 +1,104 @@
+"""Test cases for the command line.
+
+To begin we test that "mypy <directory>[/]" always recurses down the
+whole tree.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+from typing import Tuple, List, Dict, Set
+
+from mypy.myunit import Suite, SkipTestCaseException, AssertionFailure
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.version import __version__, base_version
+
# Path to the Python 3 interpreter running this test suite; the mypy script
# under test is launched with this same interpreter.
python3_path = sys.executable

# Files containing test case descriptions.
cmdline_files = ['cmdline.test']
+
+
class PythonEvaluationSuite(Suite):
    """Collect the command-line test cases from the data files."""

    def cases(self) -> List[DataDrivenTestCase]:
        cases = []  # type: List[DataDrivenTestCase]
        for test_file in cmdline_files:
            cases.extend(parse_test_cases(os.path.join(test_data_prefix, test_file),
                                          test_python_evaluation,
                                          base_path=test_temp_dir,
                                          optional_out=True,
                                          native_sep=True))
        return cases
+
+
def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
    """Run the mypy script in a subprocess on a case and compare its output.

    The case input is written to a temp file, mypy is invoked with the
    arguments from the '# cmd:' comment on the first line, and either the
    produced output files or the combined stdout/stderr is checked.
    """
    # Write the program to a file.
    program = '_program.py'
    program_path = os.path.join(test_temp_dir, program)
    with open(program_path, 'w') as file:
        for s in testcase.input:
            file.write('{}\n'.format(s))
    args = parse_args(testcase.input[0])
    args.append('--show-traceback')
    # Type check the program.
    fixed = [python3_path,
             os.path.join(testcase.old_cwd, 'scripts', 'mypy')]
    process = subprocess.Popen(fixed + args,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               cwd=test_temp_dir)
    outb = process.stdout.read()
    # Reap the child so it doesn't linger as a zombie process.
    process.wait()
    # Split output into lines.
    out = [s.rstrip('\n\r') for s in str(outb, 'utf8').splitlines()]
    # Remove temp file.
    os.remove(program_path)
    # Compare actual output to expected.
    if testcase.output_files:
        for path, expected_content in testcase.output_files:
            if not os.path.exists(path):
                raise AssertionFailure(
                    'Expected file {} was not produced by test case'.format(path))
            with open(path, 'r') as output_file:
                actual_output_content = output_file.read().splitlines()
            # Fixed misspelled local name ('noramlized_output').
            normalized_output = normalize_file_output(actual_output_content,
                                                      os.path.abspath(test_temp_dir))
            assert_string_arrays_equal(expected_content.splitlines(), normalized_output,
                                       'Output file {} did not match its expected output'.format(
                                           path))
    else:
        assert_string_arrays_equal(testcase.output, out,
                                   'Invalid output ({}, line {})'.format(
                                       testcase.file, testcase.line))
+
+
def parse_args(line: str) -> List[str]:
    """Parse the first line of the program for the command line.

    This should have the form

      # cmd: mypy <options>

    For example:

      # cmd: mypy pkg/
    """
    match = re.match('# cmd: mypy (.*)$', line)
    if match is None:
        # No args; mypy will spit out an error.
        return []
    return match.group(1).split()
+
+
def normalize_file_output(content: List[str], current_abs_path: str) -> List[str]:
    """Normalize file output for comparison.

    Replaces the current working directory, mypy version strings and
    10-digit timestamps with stable placeholders.
    """
    # Raw string: '\d' is an invalid escape in a plain str literal
    # (DeprecationWarning on modern Pythons).
    timestamp_regex = re.compile(r'\d{10}')
    result = [x.replace(current_abs_path, '$PWD') for x in content]
    result = [x.replace(__version__, '$VERSION') for x in result]
    result = [x.replace(base_version, '$VERSION') for x in result]
    result = [timestamp_regex.sub('$TIMESTAMP', x) for x in result]
    return result
diff --git a/mypy/test/testextensions.py b/mypy/test/testextensions.py
new file mode 100644
index 0000000..af3916f
--- /dev/null
+++ b/mypy/test/testextensions.py
@@ -0,0 +1,125 @@
+import sys
+import pickle
+import typing
+try:
+    import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # type: ignore # PY32 and earlier
+from unittest import TestCase, main, skipUnless
+sys.path[0:0] = ['extensions']
+from mypy_extensions import TypedDict
+
+
class BaseTestCase(TestCase):
    """TestCase augmented with subclass-relationship assertion helpers."""

    def assertIsSubclass(self, cls, class_or_tuple, msg=None):
        """Fail unless cls is a subclass of class_or_tuple."""
        if issubclass(cls, class_or_tuple):
            return
        message = '%r is not a subclass of %r' % (cls, class_or_tuple)
        if msg is not None:
            message += ' : %s' % msg
        raise self.failureException(message)

    def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
        """Fail if cls is a subclass of class_or_tuple."""
        if not issubclass(cls, class_or_tuple):
            return
        message = '%r is a subclass of %r' % (cls, class_or_tuple)
        if msg is not None:
            message += ' : %s' % msg
        raise self.failureException(message)
+
+
# True on Python 3.6+, where variable annotations (PEP 526) are available.
PY36 = sys.version_info[:2] >= (3, 6)

# The class-syntax TypedDict tests use PEP 526 annotations, which are a
# syntax error on older Pythons, so they are defined via exec() only on 3.6+.
PY36_TESTS = """
Label = TypedDict('Label', [('label', str)])

class Point2D(TypedDict):
    x: int
    y: int

class LabelPoint2D(Point2D, Label): ...
"""

if PY36:
    exec(PY36_TESTS)
+
+
class TypedDictTests(BaseTestCase):
    """Runtime behavior tests for mypy_extensions.TypedDict."""

    def test_basics_iterable_syntax(self):
        """TypedDict built from a {name: type} dict: class and instance behavior."""
        Emp = TypedDict('Emp', {'name': str, 'id': int})
        self.assertIsSubclass(Emp, dict)
        self.assertIsSubclass(Emp, typing.MutableMapping)
        self.assertNotIsSubclass(Emp, collections_abc.Sequence)
        jim = Emp(name='Jim', id=1)
        # At runtime instances are plain dicts, not instances of a subclass.
        self.assertIs(type(jim), dict)
        self.assertEqual(jim['name'], 'Jim')
        self.assertEqual(jim['id'], 1)
        self.assertEqual(Emp.__name__, 'Emp')
        self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
        self.assertEqual(Emp.__bases__, (dict,))
        self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})

    def test_basics_keywords_syntax(self):
        """TypedDict built from keyword arguments matches the iterable syntax."""
        Emp = TypedDict('Emp', name=str, id=int)
        self.assertIsSubclass(Emp, dict)
        self.assertIsSubclass(Emp, typing.MutableMapping)
        self.assertNotIsSubclass(Emp, collections_abc.Sequence)
        jim = Emp(name='Jim', id=1)  # type: ignore # mypy doesn't support keyword syntax yet
        self.assertIs(type(jim), dict)
        self.assertEqual(jim['name'], 'Jim')
        self.assertEqual(jim['id'], 1)
        self.assertEqual(Emp.__name__, 'Emp')
        self.assertEqual(Emp.__module__, 'mypy.test.testextensions')
        self.assertEqual(Emp.__bases__, (dict,))
        self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})

    def test_typeddict_errors(self):
        """Invalid constructions and runtime isinstance/issubclass raise TypeError."""
        Emp = TypedDict('Emp', {'name': str, 'id': int})
        self.assertEqual(TypedDict.__module__, 'mypy_extensions')
        jim = Emp(name='Jim', id=1)
        with self.assertRaises(TypeError):
            isinstance({}, Emp)
        with self.assertRaises(TypeError):
            isinstance(jim, Emp)
        with self.assertRaises(TypeError):
            issubclass(dict, Emp)
        with self.assertRaises(TypeError):
            TypedDict('Hi', x=1)
        with self.assertRaises(TypeError):
            TypedDict('Hi', [('x', int), ('y', 1)])
        with self.assertRaises(TypeError):
            TypedDict('Hi', [('x', int)], y=int)

    @skipUnless(PY36, 'Python 3.6 required')
    def test_py36_class_syntax_usage(self):
        """Class-based syntax (defined via exec in PY36_TESTS) merges base annotations."""
        self.assertEqual(LabelPoint2D.__annotations__, {'x': int, 'y': int, 'label': str})  # noqa
        self.assertEqual(LabelPoint2D.__bases__, (dict,))  # noqa
        self.assertNotIsSubclass(LabelPoint2D, typing.Sequence)  # noqa
        not_origin = Point2D(x=0, y=1)  # noqa
        self.assertEqual(not_origin['x'], 0)
        self.assertEqual(not_origin['y'], 1)
        other = LabelPoint2D(x=0, y=1, label='hi')  # noqa
        self.assertEqual(other['label'], 'hi')

    def test_pickle(self):
        """Instances and the TypedDict class itself round-trip through pickle."""
        global EmpD  # pickle wants to reference the class by name
        EmpD = TypedDict('EmpD', name=str, id=int)
        jane = EmpD({'name': 'jane', 'id': 37})
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            z = pickle.dumps(jane, proto)
            jane2 = pickle.loads(z)
            self.assertEqual(jane2, jane)
            self.assertEqual(jane2, {'name': 'jane', 'id': 37})
            ZZ = pickle.dumps(EmpD, proto)
            EmpDnew = pickle.loads(ZZ)
            self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane)

    def test_optional(self):
        """TypedDict classes can be used as arguments to typing constructs."""
        EmpD = TypedDict('EmpD', name=str, id=int)

        self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD])
        self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD])
+
+
# Allow running this test module directly via unittest's CLI runner.
if __name__ == '__main__':
    main()
diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py
new file mode 100644
index 0000000..d88ca1c
--- /dev/null
+++ b/mypy/test/testgraph.py
@@ -0,0 +1,69 @@
+"""Test cases for graph processing code in build.py."""
+
+from typing import AbstractSet, Dict, Set
+
+from mypy.myunit import Suite, assert_equal
+from mypy.build import BuildManager, State, BuildSourceSet
+from mypy.build import topsort, strongly_connected_components, sorted_components, order_ascc
+from mypy.version import __version__
+from mypy.options import Options
+from mypy.report import Reports
+
+
+class GraphSuite(Suite):
+
+    def test_topsort(self) -> None:
+        a = frozenset({'A'})
+        b = frozenset({'B'})
+        c = frozenset({'C'})
+        d = frozenset({'D'})
+        data = {a: {b, c}, b: {d}, c: {d}}  # type: Dict[AbstractSet[str], Set[AbstractSet[str]]]
+        res = list(topsort(data))
+        assert_equal(res, [{d}, {b, c}, {a}])
+
+    def test_scc(self) -> None:
+        vertices = {'A', 'B', 'C', 'D'}
+        edges = {'A': ['B', 'C'],
+                 'B': ['C'],
+                 'C': ['B', 'D'],
+                 'D': []}  # type: Dict[str, List[str]]
+        sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges))
+        assert_equal(sccs,
+                     {frozenset({'A'}),
+                      frozenset({'B', 'C'}),
+                      frozenset({'D'})})
+
+    def _make_manager(self) -> BuildManager:
+        manager = BuildManager(
+            data_dir='',
+            lib_path=[],
+            ignore_prefix='',
+            source_set=BuildSourceSet([]),
+            reports=Reports('', {}),
+            options=Options(),
+            version_id=__version__,
+        )
+        return manager
+
+    def test_sorted_components(self) -> None:
+        manager = self._make_manager()
+        graph = {'a': State('a', None, 'import b, c', manager),
+                 'd': State('d', None, 'pass', manager),
+                 'b': State('b', None, 'import c', manager),
+                 'c': State('c', None, 'import b, d', manager),
+                 }
+        res = sorted_components(graph)
+        assert_equal(res, [frozenset({'d'}), frozenset({'c', 'b'}), frozenset({'a'})])
+
+    def test_order_ascc(self) -> None:
+        manager = self._make_manager()
+        graph = {'a': State('a', None, 'import b, c', manager),
+                 'd': State('d', None, 'def f(): import a', manager),
+                 'b': State('b', None, 'import c', manager),
+                 'c': State('c', None, 'import b, d', manager),
+                 }
+        res = sorted_components(graph)
+        assert_equal(res, [frozenset({'a', 'd', 'c', 'b'})])
+        ascc = res[0]
+        scc = order_ascc(graph, ascc)
+        assert_equal(scc, ['d', 'c', 'b', 'a'])
diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py
new file mode 100644
index 0000000..2142456
--- /dev/null
+++ b/mypy/test/testinfer.py
@@ -0,0 +1,223 @@
+"""Test cases for type inference helper functions."""
+
+from typing import List, Optional, Tuple, Union
+
+from mypy.myunit import Suite, assert_equal, assert_true
+from mypy.checkexpr import map_actuals_to_formals
+from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED
+from mypy.types import AnyType, TupleType, Type
+
+
+class MapActualsToFormalsSuite(Suite):
+    """Test cases for checkexpr.map_actuals_to_formals."""
+
+    def test_basic(self) -> None:
+        self.assert_map([], [], [])
+
+    def test_positional_only(self) -> None:
+        self.assert_map([ARG_POS],
+                        [ARG_POS],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_POS],
+                        [ARG_POS, ARG_POS],
+                        [[0], [1]])
+
+    def test_optional(self) -> None:
+        self.assert_map([],
+                        [ARG_OPT],
+                        [[]])
+        self.assert_map([ARG_POS],
+                        [ARG_OPT],
+                        [[0]])
+        self.assert_map([ARG_POS],
+                        [ARG_OPT, ARG_OPT],
+                        [[0], []])
+
+    def test_callee_star(self) -> None:
+        self.assert_map([],
+                        [ARG_STAR],
+                        [[]])
+        self.assert_map([ARG_POS],
+                        [ARG_STAR],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_POS],
+                        [ARG_STAR],
+                        [[0, 1]])
+
+    def test_caller_star(self) -> None:
+        self.assert_map([ARG_STAR],
+                        [ARG_STAR],
+                        [[0]])
+        self.assert_map([ARG_POS, ARG_STAR],
+                        [ARG_STAR],
+                        [[0, 1]])
+        self.assert_map([ARG_STAR],
+                        [ARG_POS, ARG_STAR],
+                        [[0], [0]])
+        self.assert_map([ARG_STAR],
+                        [ARG_OPT, ARG_STAR],
+                        [[0], [0]])
+
+    def test_too_many_caller_args(self) -> None:
+        self.assert_map([ARG_POS],
+                        [],
+                        [])
+        self.assert_map([ARG_STAR],
+                        [],
+                        [])
+        self.assert_map([ARG_STAR],
+                        [ARG_POS],
+                        [[0]])
+
+    def test_tuple_star(self) -> None:
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS],
+            [[0]],
+            self.tuple(AnyType()))
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS, ARG_POS],
+            [[0], [0]],
+            self.tuple(AnyType(), AnyType()))
+        self.assert_vararg_map(
+            [ARG_STAR],
+            [ARG_POS, ARG_OPT, ARG_OPT],
+            [[0], [0], []],
+            self.tuple(AnyType(), AnyType()))
+
+    def tuple(self, *args: Type) -> TupleType:
+        """Construct a TupleType with the given item types."""
+        return TupleType(list(args), None)
+
+    def test_named_args(self) -> None:
+        self.assert_map(
+            ['x'],
+            [(ARG_POS, 'x')],
+            [[0]])
+        self.assert_map(
+            ['y', 'x'],
+            [(ARG_POS, 'x'), (ARG_POS, 'y')],
+            [[1], [0]])
+
+    def test_some_named_args(self) -> None:
+        self.assert_map(
+            ['y'],
+            [(ARG_OPT, 'x'), (ARG_OPT, 'y'), (ARG_OPT, 'z')],
+            [[], [0], []])
+
+    def test_missing_named_arg(self) -> None:
+        self.assert_map(
+            ['y'],
+            [(ARG_OPT, 'x')],
+            [[]])
+
+    def test_duplicate_named_arg(self) -> None:
+        self.assert_map(
+            ['x', 'x'],
+            [(ARG_OPT, 'x')],
+            [[0, 1]])
+
+    def test_varargs_and_bare_asterisk(self) -> None:
+        self.assert_map(
+            [ARG_STAR],
+            [ARG_STAR, (ARG_NAMED, 'x')],
+            [[0], []])
+        self.assert_map(
+            [ARG_STAR, 'x'],
+            [ARG_STAR, (ARG_NAMED, 'x')],
+            [[0], [1]])
+
+    def test_keyword_varargs(self) -> None:
+        self.assert_map(
+            ['x'],
+            [ARG_STAR2],
+            [[0]])
+        self.assert_map(
+            ['x', ARG_STAR2],
+            [ARG_STAR2],
+            [[0, 1]])
+        self.assert_map(
+            ['x', ARG_STAR2],
+            [(ARG_POS, 'x'), ARG_STAR2],
+            [[0], [1]])
+        self.assert_map(
+            [ARG_POS, ARG_STAR2],
+            [(ARG_POS, 'x'), ARG_STAR2],
+            [[0], [1]])
+
+    def test_both_kinds_of_varargs(self) -> None:
+        self.assert_map(
+            [ARG_STAR, ARG_STAR2],
+            [(ARG_POS, 'x'), (ARG_POS, 'y')],
+            [[0, 1], [0, 1]])
+
+    def test_special_cases(self) -> None:
+        self.assert_map([ARG_STAR],
+                        [ARG_STAR, ARG_STAR2],
+                        [[0], []])
+        self.assert_map([ARG_STAR, ARG_STAR2],
+                        [ARG_STAR, ARG_STAR2],
+                        [[0], [1]])
+        self.assert_map([ARG_STAR2],
+                        [(ARG_POS, 'x'), ARG_STAR2],
+                        [[0], [0]])
+        self.assert_map([ARG_STAR2],
+                        [ARG_STAR2],
+                        [[0]])
+
+    def assert_map(self,
+                   caller_kinds_: List[Union[int, str]],
+                   callee_kinds_: List[Union[int, Tuple[int, str]]],
+                   expected: List[List[int]],
+                   ) -> None:
+        """Check that mapping actuals to formals produces 'expected'.
+
+        Plain strings in the kind lists are shorthand for named arguments
+        (see expand_caller_kinds/expand_callee_kinds below).
+        """
+        caller_kinds, caller_names = expand_caller_kinds(caller_kinds_)
+        callee_kinds, callee_names = expand_callee_kinds(callee_kinds_)
+        result = map_actuals_to_formals(
+            caller_kinds,
+            caller_names,
+            callee_kinds,
+            callee_names,
+            lambda i: AnyType())
+        assert_equal(result, expected)
+
+    def assert_vararg_map(self,
+                          caller_kinds: List[int],
+                          callee_kinds: List[int],
+                          expected: List[List[int]],
+                          vararg_type: Type,
+                          ) -> None:
+        """Like assert_map, but every actual gets the given vararg type."""
+        result = map_actuals_to_formals(
+            caller_kinds,
+            [],
+            callee_kinds,
+            [],
+            lambda i: vararg_type)
+        assert_equal(result, expected)
+
+
+def expand_caller_kinds(kinds_or_names: List[Union[int, str]]
+                        ) -> Tuple[List[int], List[Optional[str]]]:
+    kinds = []
+    names = []
+    for k in kinds_or_names:
+        if isinstance(k, str):
+            kinds.append(ARG_NAMED)
+            names.append(k)
+        else:
+            kinds.append(k)
+            names.append(None)
+    return kinds, names
+
+
+def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]]
+                        ) -> Tuple[List[int], List[Optional[str]]]:
+    kinds = []
+    names = []
+    for v in kinds_and_names:
+        if isinstance(v, tuple):
+            kinds.append(v[0])
+            names.append(v[1])
+        else:
+            kinds.append(v)
+            names.append(None)
+    return kinds, names
diff --git a/mypy/test/testlex.py b/mypy/test/testlex.py
new file mode 100644
index 0000000..138aef8
--- /dev/null
+++ b/mypy/test/testlex.py
@@ -0,0 +1,466 @@
+"""Lexical analyzer test cases"""
+
+from typing import List, Union
+
+from mypy.myunit import Suite, assert_equal
+from mypy.lex import lex
+
+
+class LexerSuite(Suite):
+    """Test the lexer by comparing str() of the lexed token stream.
+
+    Expected values encode each token as Kind(text); see assert_lex for
+    the shorthand conventions used in the expected strings.
+    """
+
+    def test_empty(self) -> None:
+        self.assert_lex('', 'Eof()')
+
+    def test_keywords(self) -> None:
+        self.assert_lex(
+            'if else elif def return pass',
+            'Keyword(if) Keyword( else) Keyword( elif) Keyword( def) '
+            'Keyword( return) Keyword( pass) Break() Eof()')
+
+        self.assert_lex(
+            'from import as class global',
+            'Keyword(from) Keyword( import) Keyword( as) Keyword( class) '
+            'Keyword( global) ...')
+
+    def test_identifiers(self) -> None:
+        self.assert_lex(
+            'i x FooBar FOO_BAR __x var',
+            'Name(i) Name( x) Name( FooBar) Name( FOO_BAR) Name( __x) '
+            'Name( var) Break() Eof()')
+
+        self.assert_lex(
+            'any interface void',
+            'Name(any) Name( interface) Name( void) Break() Eof()')
+
+    def test_int_literals(self) -> None:
+        self.assert_lex(
+            '0 00 1 0987654321 10002000300040005000600070008000',
+            'IntLit(0) IntLit( 00) IntLit( 1) LexError( 0987654321) '
+            'IntLit( 10002000300040005000600070008000) Break() Eof()')
+
+    def test_hex_int_literals(self) -> None:
+        self.assert_lex('0x0 0xabcedf0189 0xAFe 0X2',
+                        'IntLit(0x0) IntLit( 0xabcedf0189) IntLit( 0xAFe) '
+                        'IntLit( 0X2) ...')
+
+    def test_oct_int_literals(self) -> None:
+        self.assert_lex('0o0 0o127 0O1',
+                        'IntLit(0o0) IntLit( 0o127) IntLit( 0O1) ...')
+
+    def test_bin_int_literals(self) -> None:
+        self.assert_lex('0b0 0b110 0B1',
+                        'IntLit(0b0) IntLit( 0b110) IntLit( 0B1) ...')
+
+    def test_float_literals(self) -> None:
+        self.assert_lex('1.2 .1 1.',
+                        'FloatLit(1.2) FloatLit( .1) FloatLit( 1.) ...')
+
+        self.assert_lex(
+            '1e2 1.2e+3 1.3e-12',
+            'FloatLit(1e2) FloatLit( 1.2e+3) FloatLit( 1.3e-12) ...')
+
+        self.assert_lex('1.e2', 'FloatLit(1.e2) ...')
+
+    def test_comments(self) -> None:
+        self.assert_lex('# foo "" bar' + '\n' + 'x #x',
+                        'Name(# foo "" bar\\nx) Break( #x) Eof()')
+
+    def test_empty_lines(self) -> None:
+        self.assert_lex(r'\n1', r'IntLit(\n1) ...')
+        self.assert_lex(r'\n\n1', r'IntLit(\n\n1) ...')
+        self.assert_lex(r'1\n\n2', r'IntLit(1) Break(\n\n) IntLit(2) ...')
+
+    def test_line_breaks(self) -> None:
+        self.assert_lex('1\\r2', 'IntLit(1) Break(\\r) IntLit(2) ...')
+        self.assert_lex('1\\r\\n2', 'IntLit(1) Break(\\r\\n) IntLit(2) ...')
+
+    def test_operators(self) -> None:
+        self.assert_lex('- + < > == != <= >= .',
+                        'Op(-) Op( +) Op( <) Op( >) Op( ==) Op( !=) Op( <=) '
+                        'Op( >=) Op( .) ...')
+
+        self.assert_lex('* / % // **',
+                        'Op(*) Op( /) Op( %) Op( //) Op( **) ...')
+
+        self.assert_lex('& | ^ ~ << >>',
+                        'Op(&) Op( |) Op( ^) Op( ~) Op( <<) Op( >>) ...')
+
+        self.assert_lex('in is and or not',
+                        'Op(in) Op( is) Op( and) Op( or) Op( not) ...')
+
+    def test_punctuators(self) -> None:
+        self.assert_lex(': = ,', 'Colon(:) Punct( =) Punct( ,) ...')
+
+        self.assert_lex(
+            '+= -= *= %= //=',
+            'Punct(+=) Punct( -=) Punct( *=) Punct( %=) Punct( //=) ...')
+        self.assert_lex('**=', 'Punct(**=) ...')
+        self.assert_lex(
+            '&= |= ^= <<= >>=',
+            'Punct(&=) Punct( |=) Punct( ^=) Punct( <<=) Punct( >>=) ...')
+
+    def test_basic_indentation(self) -> None:
+        self.assert_lex(
+            'y' + '\n' + '  x',
+            'Name(y) Break(\\n) Indent(  ) Name(x) Break() Dedent() Eof()')
+
+        self.assert_lex(
+            'y' + '\n' + '  x' + '\n' + 'z',
+            'Name(y) Break(\\n) Indent(  ) Name(x) Break(\\n) Dedent() '
+            'Name(z) Break() Eof()')
+
+    def test_multiple_indent_levels(self) -> None:
+        self.assert_lex('y' + '\n' +
+                        '  x' + '\n' +
+                        '  y' + '\n' +
+                        '    z',
+                        'Name(y) Break(\\n) ' +
+                        'Indent(  ) Name(x) Break(\\n) ' +
+                        'Name(  y) Break(\\n) ' +
+                        'Indent(    ) Name(z) Break() ' +
+                        'Dedent() Dedent() Eof()')
+
+        self.assert_lex('y' + '\n' +
+                        '  x' + '\n' +
+                        '    z' + '\n' +
+                        '  y',
+                        'Name(y) Break(\\n) ' +
+                        'Indent(  ) Name(x) Break(\\n) ' +
+                        'Indent(    ) Name(z) Break(\\n) ' +
+                        'Dedent() Name(  y) Break() ' +
+                        'Dedent() Eof()')
+
+    def test_tab_indent(self) -> None:
+        self.assert_lex('y' + '\n' +
+                        '\t' + 'x' + '\n' +
+                        '        y' + '\n' +
+                        ' ' + '\t' + 'z',
+                        'Name(y) Break(\\n) ' +
+                        'Indent(\\t) Name(x) Break(\\n) ' +
+                        'Name(        y) Break(\\n) ' +
+                        'Name( \\tz) Break() ' +
+                        'Dedent() Eof()')
+
+    def test_comment_after_dedent(self) -> None:
+        self.assert_lex('y\n'
+                        '  x\n'
+                        '# Foo\n'
+                        'z',
+                        r'Name(y) Break(\n) Indent(  ) Name(x) '
+                        r'Break(\n# Foo\n) '
+                        r'Dedent() Name(z) Break() Eof()')
+
+    def test_parens(self) -> None:
+        self.assert_lex('( x )', 'Punct(() Name( x) Punct( )) Break() Eof()')
+        self.assert_lex(
+            '( x' + '\n' + '  y )',
+            'Punct(() Name( x) Name(\\n  y) Punct( )) Break() Eof()')
+
+        self.assert_lex('()' + '\n' + ' y',
+                        'Punct(() Punct()) Break(\\n) Indent( ) Name(y) '
+                        'Break() Dedent() Eof()')
+
+        # [ ... ] and { ... }.
+        self.assert_lex(
+            '[ x' + '\n' + '  y ]',
+            'Punct([) Name( x) Name(\\n  y) Punct( ]) Break() Eof()')
+        self.assert_lex(
+            '{ x' + '\n' + '  y }',
+            'Punct({) Name( x) Name(\\n  y) Punct( }) Break() Eof()')
+
+        # Nested brackets.
+        self.assert_lex(
+            '({}' + '\n' + ' y)',
+            'Punct(() Punct({) Punct(}) Name(\\n y) Punct()) Break() Eof()')
+
+    def test_brackets_and_line_breaks(self) -> None:
+        # This used to fail.
+        self.assert_lex('{}' + '\n' + '1',
+                        'Punct({) Punct(}) Break(\\n) IntLit(1) Break() Eof()')
+
+    def test_str_literals(self) -> None:
+        self.assert_lex("'' 'foo_bar'",
+                        "StrLit('') StrLit( 'foo_bar') Break() Eof()")
+        self.assert_lex('"" "foo_bar"',
+                        'StrLit("") StrLit( "foo_bar") Break() Eof()')
+
+        self.assert_lex('"\\"" 1', 'StrLit("\\"") IntLit( 1) Break() Eof()')
+        self.assert_lex("'\\'' 1", "StrLit('\\'') IntLit( 1) Break() Eof()")
+
+        self.assert_lex('"\\\\" 1', 'StrLit("\\\\") IntLit( 1) Break() Eof()')
+        self.assert_lex("'\\\\' 1", "StrLit('\\\\') IntLit( 1) Break() Eof()")
+
+    def test_triple_quoted_string_literals(self) -> None:
+        # Single-line
+
+        self.assert_lex("''''''", "StrLit('''''') ...")
+        self.assert_lex("1 '''x''y'''1",
+                        "IntLit(1) StrLit( '''x''y''') IntLit(1) ...")
+
+        self.assert_lex('""""""', 'StrLit("""""") ...')
+        self.assert_lex('"""x""y"""', 'StrLit("""x""y""") ...')
+
+        # Multiple-line
+
+        self.assert_lex("'''" + '\n' + "'''", "StrLit('''\\n''') ...")
+        self.assert_lex("'''x''" + '\n' + "''x'''",
+                        "StrLit('''x''\\n''x''') ...")
+        self.assert_lex("'''''" + '\n' + "'''''",
+                        "StrLit('''''\\n''') StrLit('') ...")
+        self.assert_lex("'''x" + '\n' + 'xyz' + '\n' + "''x'''",
+                        "StrLit('''x\\nxyz\\n''x''') ...")
+
+        self.assert_lex('"""x' + '\n' + 'y"""', 'StrLit("""x\\ny""") ...')
+
+    def test_unicode_literals(self) -> None:
+        self.assert_lex("u'' u'foo'",
+                        "UnicodeLit(u'') UnicodeLit( u'foo') ...")
+        self.assert_lex('u"" u"foo"',
+                        'UnicodeLit(u"") UnicodeLit( u"foo") ...')
+        self.assert_lex('ur"" ur"foo"',
+                        'UnicodeLit(ur"") UnicodeLit( ur"foo") ...')
+        self.assert_lex('u"""foo\n"""',
+                        r'UnicodeLit(u"""foo\n""") ...')
+
+    def test_unicode_literal_capital_u(self) -> None:
+        self.assert_lex("U'foo'", "UnicodeLit(U'foo') ...")
+
+    def test_semicolons(self) -> None:
+        self.assert_lex('a;b', 'Name(a) Break(;) Name(b) ...')
+        self.assert_lex('a;', 'Name(a) Break(;) Eof()')
+
+        self.assert_lex(';a', 'Break(;) Name(a) ...')
+        self.assert_lex('a;;b', 'Name(a) Break(;) Break(;) Name(b) ...')
+
+    def test_raw_string(self) -> None:
+        self.assert_lex("r'' r'foo bar'",
+                        "StrLit(r'') StrLit( r'foo bar') ...")
+        self.assert_lex('r"" r"foo bar"',
+                        'StrLit(r"") StrLit( r"foo bar") ...')
+
+        self.assert_lex("r'\\x\\''", "StrLit(r'\\x\\'') ...")
+        self.assert_lex('r"\\x\\""', 'StrLit(r"\\x\\"") ...')
+
+        self.assert_lex("r'\\\\' ''", "StrLit(r'\\\\') StrLit( '') ...")
+        self.assert_lex('r"\\\\" ""', 'StrLit(r"\\\\") StrLit( "") ...')
+
+        self.assert_lex("r'''" + '\n' + "x'''", "StrLit(r'''\\nx''') ...")
+
+    def test_raw_string_with_capital_r(self) -> None:
+        self.assert_lex("R'foo'", "StrLit(R'foo') ...")
+
+    def test_escapes_in_triple_quoted_literals(self) -> None:
+        self.assert_lex(r"'''\''''",
+                        r"StrLit('''\'''') ...")
+        self.assert_lex(r'"""\""""',
+                        r'StrLit("""\"""") ...')
+        self.assert_lex(r'"""\\"""',
+                        r'StrLit("""\\""") ...')
+
+    def test_escapes_in_triple_quoted_raw_literals(self) -> None:
+        self.assert_lex(r"r'''\''''",
+                        r"StrLit(r'''\'''') ...")
+        self.assert_lex(r"r'''\\'''",
+                        r"StrLit(r'''\\''') ...")
+        self.assert_lex(r'r"""\""""',
+                        r'StrLit(r"""\"""") ...')
+
+    def test_bytes(self) -> None:
+        self.assert_lex("b'\\'' b'foo bar'",
+                        "BytesLit(b'\\'') BytesLit( b'foo bar') ...")
+        self.assert_lex('b"\\"" b"foo bar"',
+                        'BytesLit(b"\\"") BytesLit( b"foo bar") ...')
+
+        self.assert_lex("b'''" + '\n' + " x'''", "BytesLit(b'''\\n x''') ...")
+
+    def test_bytes_with_capital_b(self) -> None:
+        self.assert_lex("B'foo'", "BytesLit(B'foo') ...")
+
+    def test_raw_bytes(self) -> None:
+        self.assert_lex("br'x\\x\\''", "BytesLit(br'x\\x\\'') ...")
+        self.assert_lex('br"x\\y\\""', 'BytesLit(br"x\\y\\"") ...')
+
+        self.assert_lex('br"""' + '\n' + 'x"""', 'BytesLit(br"""\\nx""") ...')
+
+    def test_raw_bytes_alternative(self) -> None:
+        self.assert_lex("rb'x\\x\\''", "BytesLit(rb'x\\x\\'') ...")
+
+    def test_backslash(self) -> None:
+        self.assert_lex('a\\' + '\n' + ' b', 'Name(a) Name(\\\\n b) ...')
+        self.assert_lex(
+            'a = \\' + '\n' + ' 1' + '\n' + '=',
+            'Name(a) Punct( =) IntLit( \\\\n 1) Break(\\n) Punct(=) ...')
+
+    def test_backslash_in_string(self) -> None:
+        self.assert_lex("'foo\\" + '\n' + "bar'", "StrLit('foo\\\\nbar') ...")
+        self.assert_lex("'foo\\" + '\n' + ' zar\\' + '\n' + "  bar'",
+                        "StrLit('foo\\\\n zar\\\\n  bar') ...")
+
+        self.assert_lex('"foo\\' + '\n' + 'bar"', 'StrLit("foo\\\\nbar") ...')
+
+    def test_backslash_in_raw_string(self) -> None:
+        self.assert_lex("r'a\\" + '\n' + "b\\'1",
+                        "StrLit(r'a\\\\nb\\') IntLit(1) ...")
+        self.assert_lex("r'a\\" + '\n' + '-\\' + '\n' + "b\\'1",
+                        "StrLit(r'a\\\\n-\\\\nb\\') IntLit(1) ...")
+        self.assert_lex('r"a\\' + '\n' + 'b\\"1',
+                        'StrLit(r"a\\\\nb\\") IntLit(1) ...')
+        self.assert_lex('r"a\\' + '\n' + '-\\' + '\n' + 'b\\"1',
+                        'StrLit(r"a\\\\n-\\\\nb\\") IntLit(1) ...')
+
+    def test_final_dedent(self) -> None:
+        self.assert_lex(
+            '1' + '\n' + ' 1' + '\n',
+            'IntLit(1) Break(\\n) Indent( ) IntLit(1) Break(\\n) Dedent() Eof()')
+
+    def test_empty_line(self) -> None:
+        self.assert_lex('1' + '\n' + ' 1' + '\n' + '\n',
+                        r'IntLit(1) Break(\n) Indent( ) IntLit(1) '
+                        r'Break(\n\n) Dedent() Eof()')
+
+    def test_comments_and_indents(self) -> None:
+        self.assert_lex('1' + '\n' + ' #x' + '\n' + ' y',
+                        r'IntLit(1) Break(\n #x\n) Indent( ) Name(y) '
+                        r'Break() Dedent() Eof()')
+        self.assert_lex('1' + '\n' + '#x' + '\n' + ' y',
+                        r'IntLit(1) Break(\n#x\n) Indent( ) Name(y) '
+                        r'Break() Dedent() Eof()')
+
+    def test_form_feed(self) -> None:
+        self.assert_lex('\x0c' + '\n' + 'x', 'Name(\x0c\\nx) ...')
+
+    def test_comment_after_linebreak(self) -> None:
+        self.assert_lex('1\n# foo\n2',
+                        'IntLit(1) Break(\\n# foo\\n) IntLit(2) ...')
+        self.assert_lex('1\n# foo',
+                        'IntLit(1) Break(\\n# foo) Eof()')
+
+    def test_line_numbers(self) -> None:
+        self.assert_line('a\\nb', [1, 1, 2, 2, 2])
+
+        self.assert_line('(\\nb)', [1, 2, 2])  # Note: omit break and eof tokens
+
+        self.assert_line('a\\n b', [1, 1,      # a, break
+                                    2, 2, 2,   # indent, b, break
+                                    2, 2])     # dedent, break
+        self.assert_line('a\\n b\\nc', [1, 1,       # a, break
+                                        2, 2, 2,    # indent, b, break
+                                        3, 3])      # dedent, c
+
+        self.assert_line('a\\rb', [1, 1, 2])
+        self.assert_line('a\\r\\nb', [1, 1, 2])
+
+        self.assert_line('"""x""" 1', [1, 1])
+        self.assert_line('"""x\\ny""" 1', [1, 2])
+        self.assert_line('"""x\\r\\ny""" 1', [1, 2])
+        self.assert_line('"""x\\ry""" 1', [1, 2])
+        self.assert_line('"""x\\n\\ny""" 1', [1, 3])
+        self.assert_line('\\n"""x\\ny""" 1', [2, 3])
+
+        self.assert_line('"x" 1', [1, 1])
+        self.assert_line('"\\\\n" 1', [1, 2])
+        self.assert_line('"\\\\nx\\\\n" 1', [1, 3])
+
+        self.assert_line('r"x" 1', [1, 1])
+        self.assert_line('r"\\\\n" 1', [1, 2])
+        self.assert_line('r"\\\\nx\\\\n" 1', [1, 3])
+
+    def test_backslash_line(self) -> None:
+        self.assert_line('a\\\\n 1\\n=', [1, 2, 2, 3])
+
+    def test_invalid_parens(self) -> None:
+        self.assert_lex('([\\n )\\n1',
+                        'Punct(() Punct([) Punct(\\n )) IntLit(\\n1) ...')
+        self.assert_lex('])', 'Punct(]) Punct()) ...')
+        self.assert_lex('(]\\n )', 'Punct(() Punct(]) Punct(\\n )) ...')
+        self.assert_lex('(\\n ])', 'Punct(() Punct(\\n ]) Punct()) ...')
+
+    def test_invalid_indent(self) -> None:
+        self.assert_lex('x\\n  y\\n z',
+                        'Name(x) Break(\\n) Indent(  ) Name(y) ' +
+                        'Break(\\n) Dedent() LexError( ) Name(z) ...')
+
+    def test_invalid_backslash(self) -> None:
+        self.assert_lex('\\ \\nx', 'LexError(\\) Break( \\n) Name(x) ...')
+        self.assert_lex('\\ \\nx', 'LexError(\\) Break( \\n) Name(x) ...')
+
+    def test_non_terminated_string_literal(self) -> None:
+        self.assert_lex("'", 'LexError(\') ...')
+        self.assert_lex("'\\na", 'LexError(\') Break(\\n) Name(a) ...')
+
+        self.assert_lex('"', 'LexError(") ...')
+        self.assert_lex('"\\na', 'LexError(") Break(\\n) Name(a) ...')
+
+        self.assert_lex("r'", 'LexError(r\') ...')
+        self.assert_lex('r"', 'LexError(r") ...')
+
+        self.assert_lex('"""', 'LexError(""") ...')
+        self.assert_lex('"""\\n', 'LexError("""\\n) ...')
+
+        self.assert_lex("'''", "LexError(''') ...")
+        self.assert_lex("'''\\n", "LexError('''\\n) ...")
+
+        self.assert_lex("'\\", 'LexError(\'\\) ...')
+        self.assert_lex("'\\\\n", 'LexError(\'\\\\n) ...')
+        self.assert_lex("r'\\", 'LexError(r\'\\) ...')
+        self.assert_lex("r'\\\\n", 'LexError(r\'\\\\n) ...')
+
+    def test_invalid_hex_int_literals(self) -> None:
+        self.assert_lex('0x', 'LexError(  ) ...')
+        self.assert_lex('0xax', 'LexError(    ) ...')
+
+    def test_latin1_encoding(self) -> None:
+        self.assert_lex(b'# coding: latin1\n"\xbb"',
+                        'StrLit(# coding: latin1\\n"\xbb") Break() Eof()')
+
+    def test_utf8_encoding(self) -> None:
+        self.assert_lex('"\xbb"'.encode('utf8'),
+                        'StrLit("\xbb") Break() Eof()')
+        self.assert_lex(b'"\xbb"',
+                        "LexError('utf8' codec can't decode byte 187 in column 2) "
+                        'Break() Eof()')
+        self.assert_lex(b'\n"abcde\xbc"',
+                        "LexError('utf8' codec can't decode byte 188 in column 7) "
+                        'Break() Eof()')
+
+    def test_byte_order_mark(self) -> None:
+        self.assert_lex('\ufeff"\xbb"'.encode('utf8'),
+                        'Bom(\ufeff) StrLit("\xbb") Break() Eof()')
+
+    def test_long_comment(self) -> None:
+        prog = '# pass\n' * 1000
+        self.assert_lex(prog, 'Eof(%s)' % repr(prog)[1:-1])
+
+    # TODO
+    #   invalid escape sequences in string literals etc.
+
+    def assert_lex(self, src: Union[str, bytes], lexed: str) -> None:
+        """Lex 'src' and compare the token stream's repr to 'lexed'.
+
+        For str sources, literal '\\n'/'\\r' sequences in 'src' are first
+        replaced with real line breaks.  A trailing ' ...' in 'lexed' is
+        shorthand for ' Break() Eof()'.
+        """
+        if isinstance(src, str):
+            src = src.replace('\\n', '\n')
+            src = src.replace('\\r', '\r')
+
+        if lexed.endswith(' ...'):
+            lexed = lexed[:-3] + 'Break() Eof()'
+
+        l = lex(src)[0]
+        r = []
+        for t in l:
+            r.append(str(t))
+        act = ' '.join(r)
+        if act != lexed:
+            # Print both strings in full to make mismatches easier to diagnose.
+            print('Actual:  ', act)
+            print('Expected:', lexed)
+        assert_equal(act, lexed)
+
+    def assert_line(self, s: str, a: List[int]) -> None:
+        """Lex 's' and compare each token's line number to 'a'.
+
+        Literal '\\n'/'\\r' sequences in 's' are replaced with real line
+        breaks first.  'a' may omit the final Break and Eof tokens; they
+        are then assumed to be on the same line as the last listed token.
+        """
+        s = s.replace('\\n', '\n')
+        s = s.replace('\\r', '\r')
+
+        tt = lex(s)[0]
+        r = []
+        for t in tt:
+            r.append(t.line)
+        if len(r) == len(a) + 2:
+            # Expected list omitted the trailing Break and Eof tokens.
+            a = a[:]
+            a.append(a[-1])
+            a.append(a[-1])
+        assert_equal(r, a)
diff --git a/mypy/test/testmoduleinfo.py b/mypy/test/testmoduleinfo.py
new file mode 100644
index 0000000..5818479
--- /dev/null
+++ b/mypy/test/testmoduleinfo.py
@@ -0,0 +1,14 @@
+from mypy import moduleinfo
+from mypy.myunit import (
+    Suite, assert_equal, assert_true, assert_false
+)
+
+
+class ModuleInfoSuite(Suite):
+    def test_is_in_module_collection(self) -> None:
+        assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo'))
+        assert_true(moduleinfo.is_in_module_collection({'foo'}, 'foo.bar'))
+        assert_false(moduleinfo.is_in_module_collection({'foo'}, 'fo'))
+        assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar'))
+        assert_true(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo.bar.zar'))
+        assert_false(moduleinfo.is_in_module_collection({'foo.bar'}, 'foo'))
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
new file mode 100644
index 0000000..d6789c0
--- /dev/null
+++ b/mypy/test/testparse.py
@@ -0,0 +1,79 @@
+"""Tests for the mypy parser."""
+
+import os.path
+
+from typing import List
+
+from mypy import defaults
+from mypy.myunit import Suite, AssertionFailure
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test import config
+from mypy.parse import parse
+from mypy.errors import CompileError
+from mypy.options import Options
+
+
+class ParserSuite(Suite):
+    parse_files = ['parse.test',
+                   'parse-python2.test']
+
+    def cases(self) -> List[DataDrivenTestCase]:
+        # The test case descriptions are stored in data files.
+        c = []  # type: List[DataDrivenTestCase]
+        for f in self.parse_files:
+            c += parse_test_cases(
+                os.path.join(config.test_data_prefix, f), test_parser)
+        return c
+
+
+def test_parser(testcase: DataDrivenTestCase) -> None:
+    """Perform a single parser test case.
+
+    The argument contains the description of the test case.
+    """
+    options = Options()
+
+    if testcase.file.endswith('python2.test'):
+        options.python_version = defaults.PYTHON2_VERSION
+    else:
+        options.python_version = defaults.PYTHON3_VERSION
+
+    try:
+        n = parse(bytes('\n'.join(testcase.input), 'ascii'),
+                  fnam='main',
+                  errors=None,
+                  options=options)
+        a = str(n).split('\n')
+    except CompileError as e:
+        a = e.messages
+    assert_string_arrays_equal(testcase.output, a,
+                               'Invalid parser output ({}, line {})'.format(
+                                   testcase.file, testcase.line))
+
+
+# The file name shown in test case output. This is displayed in error
+# messages, and must match the file name in the test case descriptions.
+INPUT_FILE_NAME = 'file'  # type: str
+
+
+class ParseErrorSuite(Suite):
+    def cases(self) -> List[DataDrivenTestCase]:
+        # Test case descriptions are in an external file.
+        return parse_test_cases(os.path.join(config.test_data_prefix,
+                                             'parse-errors.test'),
+                                test_parse_error)
+
+
+def test_parse_error(testcase: DataDrivenTestCase) -> None:
+    try:
+        # Compile temporary file. The test file contains non-ASCII characters.
+        parse(bytes('\n'.join(testcase.input), 'utf-8'), INPUT_FILE_NAME, None, Options())
+        raise AssertionFailure('No errors reported')
+    except CompileError as e:
+        # Verify that there was a compile error and that the error messages
+        # are equivalent.
+        assert_string_arrays_equal(
+            testcase.output, e.messages,
+            'Invalid compiler output ({}, line {})'.format(testcase.file,
+                                                           testcase.line))
diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py
new file mode 100644
index 0000000..cc598d2
--- /dev/null
+++ b/mypy/test/testpythoneval.py
@@ -0,0 +1,135 @@
+"""Test cases for running mypy programs using a Python interpreter.
+
+Each test case type checks a program then runs it using Python. The
+output (stdout) of the program is compared to expected output. Type checking
+uses full builtins and other stubs.
+
+Note: Currently Python interpreter paths are hard coded.
+
+Note: These test cases are *not* included in the main test suite, as including
+      this suite would slow down the main suite too much.
+"""
+
+from contextlib import contextmanager
+import errno
+import os
+import os.path
+import re
+import subprocess
+import sys
+
+import typing
+from typing import Dict, List, Tuple
+
+from mypy.myunit import Suite, SkipTestCaseException
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.test.data import DataDrivenTestCase, parse_test_cases
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.util import try_find_python2_interpreter
+
+
# Files which contain test case descriptions.
python_eval_files = ['pythoneval.test',
                     'python2eval.test']

# Test files whose programs need a Python 3.4+ interpreter to run.
python_34_eval_files = ['pythoneval-asyncio.test',
                        'pythoneval-enum.test']

# Path to Python 3 interpreter
python3_path = sys.executable
# Matches the generic program name in expected output; it is replaced by
# the actual per-test file name (see adapt_output below).
program_re = re.compile(r'\b_program.py\b')
+
+
class PythonEvaluationSuite(Suite):
    def cases(self) -> List[DataDrivenTestCase]:
        """Collect evaluation test cases from all description files.

        The asyncio/enum test files need a 3.4+ interpreter at runtime,
        so they are only included when the current interpreter is new
        enough to execute them.
        """
        c = []  # type: List[DataDrivenTestCase]
        for f in python_eval_files:
            c += parse_test_cases(os.path.join(test_data_prefix, f),
                                  test_python_evaluation, test_temp_dir, True)
        # Tuple comparison is the idiomatic version check; the original
        # 'major == 3 and minor >= 4' form breaks for any major > 3.
        if sys.version_info >= (3, 4):
            for f in python_34_eval_files:
                c += parse_test_cases(os.path.join(test_data_prefix, f),
                                      test_python_evaluation, test_temp_dir,
                                      True)
        return c
+
+
def test_python_evaluation(testcase: DataDrivenTestCase) -> None:
    """Runs Mypy in a subprocess.

    If this passes without errors, executes the script again with a given Python
    version.
    """
    mypy_cmdline = [
        python3_path,
        os.path.join(testcase.old_cwd, 'scripts', 'mypy'),
        '--show-traceback',
    ]
    py2 = testcase.name.lower().endswith('python2')
    if py2:
        mypy_cmdline.append('--py2')
        interpreter = try_find_python2_interpreter()
        if not interpreter:
            # Skip, can't find a Python 2 interpreter.
            raise SkipTestCaseException()
    else:
        interpreter = python3_path

    # Write the program to a file.
    program = '_' + testcase.name + '.py'
    mypy_cmdline.append(program)
    program_path = os.path.join(test_temp_dir, program)
    with open(program_path, 'w') as file:
        for s in testcase.input:
            file.write('{}\n'.format(s))
    try:
        # Type check the program.
        # This uses the same PYTHONPATH as the current process.
        returncode, out = run(mypy_cmdline)
        if returncode == 0:
            # Set up module path for the execution.
            # This needs the typing module but *not* the mypy module.
            vers_dir = '2.7' if py2 else '3.2'
            typing_path = os.path.join(testcase.old_cwd, 'lib-typing', vers_dir)
            assert os.path.isdir(typing_path)
            env = os.environ.copy()
            env['PYTHONPATH'] = typing_path
            returncode, interp_out = run([interpreter, program], env=env)
            out += interp_out
    finally:
        # Remove the temp file even if type checking or execution raised,
        # so a failed test does not leave a stale program behind that
        # could confuse later test runs.
        os.remove(program_path)
    assert_string_arrays_equal(adapt_output(testcase), out,
                               'Invalid output ({}, line {})'.format(
                                   testcase.file, testcase.line))
+
+
def split_lines(*streams: bytes) -> List[str]:
    """Decode each byte stream as UTF-8 and return all of their lines.

    Lines are returned in stream order with any trailing newline or
    carriage-return characters stripped.
    """
    lines = []  # type: List[str]
    for raw in streams:
        for line in str(raw, 'utf8').splitlines():
            lines.append(line.rstrip('\n\r'))
    return lines
+
+
def adapt_output(testcase: DataDrivenTestCase) -> List[str]:
    """Rewrite the expected output to use the test's real program name.

    Expected output in the description files refers to the generic
    '_program.py'; substitute the actual per-test file name for it.
    """
    actual_name = '_{}.py'.format(testcase.name)
    adapted = []  # type: List[str]
    for line in testcase.output:
        adapted.append(program_re.sub(actual_name, line))
    return adapted
+
+
def run(
    cmdline: List[str], *, env: Dict[str, str] = None, timeout: int = 30
) -> Tuple[int, List[str]]:
    """A poor man's subprocess.run() for 3.3 and 3.4 compatibility.

    Run cmdline in test_temp_dir, capturing stdout and stderr.

    Return the exit code and the combined stdout/stderr lines. On
    timeout the process is killed and its output is discarded.
    """
    process = subprocess.Popen(
        cmdline,
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=test_temp_dir,
    )
    try:
        out, err = process.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        out = err = b''
        process.kill()
        # Reap the killed process: without wait(), process.returncode is
        # still None when we read it below, violating the declared
        # Tuple[int, ...] return type.
        process.wait()
    return process.returncode, split_lines(out, err)
diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py
new file mode 100644
index 0000000..80e6980
--- /dev/null
+++ b/mypy/test/testreports.py
@@ -0,0 +1,40 @@
+"""Test cases for reports generated by mypy."""
+import textwrap
+
+from mypy.myunit import Suite, assert_equal
+from mypy.report import CoberturaPackage, get_line_rate
+
+import lxml.etree as etree
+
+
class CoberturaReportSuite(Suite):
    """Unit tests for the Cobertura XML coverage report generator."""

    def test_get_line_rate(self) -> None:
        # A module with no lines (0/0) is reported as fully covered.
        assert_equal('1.0', get_line_rate(0, 0))
        assert_equal('0.3333', get_line_rate(1, 3))

    def test_as_xml(self) -> None:
        # Build a package containing a nested package and check the
        # serialized XML byte for byte (including pretty_print indentation).
        cobertura_package = CoberturaPackage('foobar')
        cobertura_package.covered_lines = 21
        cobertura_package.total_lines = 42

        child_package = CoberturaPackage('raz')
        child_package.covered_lines = 10
        child_package.total_lines = 10
        child_package.classes['class'] = etree.Element('class')

        cobertura_package.packages['raz'] = child_package

        expected_output = textwrap.dedent('''\
            <package complexity="1.0" name="foobar" branch-rate="0" line-rate="0.5000">
              <classes/>
              <packages>
                <package complexity="1.0" name="raz" branch-rate="0" line-rate="1.0000">
                  <classes>
                    <class/>
                  </classes>
                </package>
              </packages>
            </package>
        ''').encode('ascii')
        assert_equal(expected_output,
                     etree.tostring(cobertura_package.as_xml(), pretty_print=True))
diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py
new file mode 100644
index 0000000..4870fa8
--- /dev/null
+++ b/mypy/test/testsemanal.py
@@ -0,0 +1,224 @@
+"""Semantic analyzer test cases"""
+
+import os.path
+
+from typing import Dict, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test.helpers import (
+    assert_string_arrays_equal, normalize_error_messages, testfile_pyversion,
+)
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.errors import CompileError
+from mypy.nodes import TypeInfo
+from mypy.options import Options
+
+
# Semantic analyzer test cases: dump parse tree

# Semantic analysis test case description files. Each file groups test
# cases for one language area (expressions, classes, modules, ...).
semanal_files = ['semanal-basic.test',
                 'semanal-expressions.test',
                 'semanal-classes.test',
                 'semanal-types.test',
                 'semanal-typealiases.test',
                 'semanal-modules.test',
                 'semanal-statements.test',
                 'semanal-abstractclasses.test',
                 'semanal-namedtuple.test',
                 'semanal-typeddict.test',
                 'semanal-python2.test']
+
+
def get_semanal_options() -> Options:
    """Return the Options shared by all semantic analysis test runs."""
    opts = Options()
    # Use the lightweight test stubs and stop after semantic analysis.
    opts.use_builtins_fixtures = True
    opts.semantic_analysis_only = True
    opts.show_traceback = True
    return opts
+
+
class SemAnalSuite(Suite):
    def cases(self) -> List[DataDrivenTestCase]:
        """Collect test cases from every semantic analysis description file."""
        cases = []  # type: List[DataDrivenTestCase]
        for filename in semanal_files:
            path = os.path.join(test_data_prefix, filename)
            cases += parse_test_cases(path,
                                      test_semanal,
                                      base_path=test_temp_dir,
                                      optional_out=True,
                                      native_sep=True)
        return cases
+
+
def test_semanal(testcase: DataDrivenTestCase) -> None:
    """Perform a semantic analysis test case.

    The testcase argument contains a description of the test case
    (inputs and output). The actual output is either the error list or,
    on success, the string dump of each analyzed user file.
    """

    try:
        src = '\n'.join(testcase.input)
        options = get_semanal_options()
        options.python_version = testfile_pyversion(testcase.file)
        result = build.build(sources=[BuildSource('main', None, src)],
                             options=options,
                             alt_lib_path=test_temp_dir)
        a = result.errors
        if a:
            # Route errors through the same path as CompileError below.
            raise CompileError(a)
        # Include string representations of the source files in the actual
        # output.
        for fnam in sorted(result.files.keys()):
            f = result.files[fnam]
            # Omit the builtins module and files with a special marker in the
            # path.
            # TODO the test is not reliable
            # NOTE(review): base names starting or ending with '_' look like
            # fixture/stub files that should not appear in output — confirm.
            if (not f.path.endswith((os.sep + 'builtins.pyi',
                                     'typing.pyi',
                                     'mypy_extensions.pyi',
                                     'abc.pyi',
                                     'collections.pyi'))
                    and not os.path.basename(f.path).startswith('_')
                    and not os.path.splitext(
                        os.path.basename(f.path))[0].endswith('_')):
                a += str(f).split('\n')
    except CompileError as e:
        a = e.messages
    assert_string_arrays_equal(
        testcase.output, a,
        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
                                                                testcase.line))
+
+
# Semantic analyzer error test cases

# Paths to files containing test case descriptions (cases that are
# expected to produce semantic analysis errors).
semanal_error_files = ['semanal-errors.test']
+
+
class SemAnalErrorSuite(Suite):
    def cases(self) -> List[DataDrivenTestCase]:
        """Read error test cases from their description files."""
        cases = []  # type: List[DataDrivenTestCase]
        for filename in semanal_error_files:
            path = os.path.join(test_data_prefix, filename)
            cases += parse_test_cases(path, test_semanal_error, test_temp_dir,
                                      optional_out=True)
        return cases
+
+
def test_semanal_error(testcase: DataDrivenTestCase) -> None:
    """Run a semantic analysis test case that is expected to fail."""
    program = '\n'.join(testcase.input)
    try:
        result = build.build(sources=[BuildSource('main', None, program)],
                             options=get_semanal_options(),
                             alt_lib_path=test_temp_dir)
        messages = result.errors
        # An error test case must actually report some errors.
        assert messages, 'No errors reported in {}, line {}'.format(
            testcase.file, testcase.line)
    except CompileError as e:
        # Errors may also surface as a CompileError; use its messages.
        messages = e.messages
    assert_string_arrays_equal(
        testcase.output, normalize_error_messages(messages),
        'Invalid compiler output ({}, line {})'.format(testcase.file, testcase.line))
+
+
# SymbolNode table export test cases

# Test case descriptions for symbol table dumps.
semanal_symtable_files = ['semanal-symtable.test']
+
+
class SemAnalSymtableSuite(Suite):
    """Test cases that dump the symbol table after semantic analysis."""

    def cases(self) -> List[DataDrivenTestCase]:
        c = []  # type: List[DataDrivenTestCase]
        for f in semanal_symtable_files:
            c += parse_test_cases(os.path.join(test_data_prefix, f),
                                  self.run_test, test_temp_dir)
        return c

    def run_test(self, testcase: DataDrivenTestCase) -> None:
        """Perform a test case."""
        try:
            # Build test case input.
            src = '\n'.join(testcase.input)
            result = build.build(sources=[BuildSource('main', None, src)],
                                 options=get_semanal_options(),
                                 alt_lib_path=test_temp_dir)
            # The output is the symbol table converted into a string.
            a = result.errors
            if a:
                raise CompileError(a)
            for f in sorted(result.files.keys()):
                # Skip stub modules; only user code is interesting here.
                if f not in ('builtins', 'typing', 'abc'):
                    a.append('{}:'.format(f))
                    for s in str(result.files[f].names).split('\n'):
                        a.append('  ' + s)
        except CompileError as e:
            a = e.messages
        assert_string_arrays_equal(
            testcase.output, a,
            'Invalid semantic analyzer output ({}, line {})'.format(
                testcase.file, testcase.line))
+
+
# Type info export test cases

# Test case description files.
semanal_typeinfo_files = ['semanal-typeinfo.test']
+
+
class SemAnalTypeInfoSuite(Suite):
    """Test cases that dump the TypeInfos created by semantic analysis."""

    def cases(self) -> List[DataDrivenTestCase]:
        """Read test case descriptions from their files."""
        c = []  # type: List[DataDrivenTestCase]
        for f in semanal_typeinfo_files:
            c += parse_test_cases(os.path.join(test_data_prefix, f),
                                  self.run_test, test_temp_dir)
        return c

    def run_test(self, testcase: DataDrivenTestCase) -> None:
        """Perform a test case."""
        try:
            # Build test case input.
            src = '\n'.join(testcase.input)
            result = build.build(sources=[BuildSource('main', None, src)],
                                 options=get_semanal_options(),
                                 alt_lib_path=test_temp_dir)
            a = result.errors
            if a:
                raise CompileError(a)

            # Collect all TypeInfos in top-level modules.
            typeinfos = TypeInfoMap()
            for f in result.files.values():
                for n in f.names.values():
                    if isinstance(n.node, TypeInfo):
                        typeinfos[n.fullname] = n.node

            # The output is the symbol table converted into a string.
            a = str(typeinfos).split('\n')
        except CompileError as e:
            a = e.messages
        assert_string_arrays_equal(
            testcase.output, a,
            'Invalid semantic analyzer output ({}, line {})'.format(
                testcase.file, testcase.line))
+
+
class TypeInfoMap(Dict[str, TypeInfo]):
    """Map from full type name to TypeInfo, with a readable str() dump."""

    def __str__(self) -> str:
        lines = ['TypeInfoMap(']  # type: List[str]
        for name, info in sorted(self.items()):
            if not isinstance(name, str):
                continue
            # Library and builtin types are noise in test output; skip them.
            if name.startswith(('builtins.', 'typing.', 'abc.')):
                continue
            indented = ('\n' + '  ').join(str(info).split('\n'))
            lines.append('  {} : {}'.format(name, indented))
        lines[-1] += ')'
        return '\n'.join(lines)
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
new file mode 100644
index 0000000..e407f75
--- /dev/null
+++ b/mypy/test/testsolve.py
@@ -0,0 +1,156 @@
+"""Test cases for the constraint solver used in type inference."""
+
+from typing import List, Union, Tuple
+
+from mypy.myunit import Suite, assert_equal
+from mypy.constraints import SUPERTYPE_OF, SUBTYPE_OF, Constraint
+from mypy.solve import solve_constraints
+from mypy.typefixture import TypeFixture
+from mypy.types import Type, TypeVarType, TypeVarId
+
+
class SolveSuite(Suite):
    """Unit tests for solve_constraints().

    Each test lists the type variables, the constraints on them, and the
    expected solution. In the expected list, None means the variable's
    constraints are unsatisfiable; a tuple is compared by its first
    element only (see assert_solve).
    """

    def __init__(self) -> None:
        super().__init__()
        self.fx = TypeFixture()

    def test_empty_input(self) -> None:
        self.assert_solve([], [], [])

    def test_simple_supertype_constraints(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.a)],
                          [(self.fx.a, self.fx.o)])
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.a),
                           self.supc(self.fx.t, self.fx.b)],
                          [(self.fx.a, self.fx.o)])

    def test_simple_subtype_constraints(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.a)],
                          [self.fx.a])
        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.a),
                           self.subc(self.fx.t, self.fx.b)],
                          [self.fx.b])

    def test_both_kinds_of_constraints(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.b),
                           self.subc(self.fx.t, self.fx.a)],
                          [(self.fx.b, self.fx.a)])

    def test_unsatisfiable_constraints(self) -> None:
        # The constraints are impossible to satisfy.
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.a),
                           self.subc(self.fx.t, self.fx.b)],
                          [None])

    def test_exactly_specified_result(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.b),
                           self.subc(self.fx.t, self.fx.b)],
                          [(self.fx.b, self.fx.b)])

    def test_multiple_variables(self) -> None:
        self.assert_solve([self.fx.t.id, self.fx.s.id],
                          [self.supc(self.fx.t, self.fx.b),
                           self.supc(self.fx.s, self.fx.c),
                           self.subc(self.fx.t, self.fx.a)],
                          [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)])

    def test_no_constraints_for_var(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [],
                          [self.fx.nonet])
        self.assert_solve([self.fx.t.id, self.fx.s.id],
                          [],
                          [self.fx.nonet, self.fx.nonet])
        self.assert_solve([self.fx.t.id, self.fx.s.id],
                          [self.supc(self.fx.s, self.fx.a)],
                          [self.fx.nonet, (self.fx.a, self.fx.o)])

    def test_void_constraints(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.void)],
                          [(self.fx.void, self.fx.void)])
        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.void)],
                          [(self.fx.void, self.fx.void)])

        # Both bounds void.
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.void),
                           self.subc(self.fx.t, self.fx.void)],
                          [(self.fx.void, self.fx.void)])

        # Cannot infer any type.
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.a),
                           self.supc(self.fx.t, self.fx.void)],
                          [None])
        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.a),
                           self.subc(self.fx.t, self.fx.void)],
                          [None])

    def test_simple_constraints_with_dynamic_type(self) -> None:
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.anyt)],
                          [(self.fx.anyt, self.fx.anyt)])
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.anyt),
                           self.supc(self.fx.t, self.fx.anyt)],
                          [(self.fx.anyt, self.fx.anyt)])
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.anyt),
                           self.supc(self.fx.t, self.fx.a)],
                          [(self.fx.anyt, self.fx.anyt)])

        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.anyt)],
                          [(self.fx.anyt, self.fx.anyt)])
        self.assert_solve([self.fx.t.id],
                          [self.subc(self.fx.t, self.fx.anyt),
                           self.subc(self.fx.t, self.fx.anyt)],
                          [(self.fx.anyt, self.fx.anyt)])
        # self.assert_solve([self.fx.t.id],
        #                   [self.subc(self.fx.t, self.fx.anyt),
        #                    self.subc(self.fx.t, self.fx.a)],
        #                   [(self.fx.anyt, self.fx.anyt)])
        # TODO: figure out what this should be after changes to meet(any, X)

    def test_both_normal_and_any_types_in_results(self) -> None:
        # If one of the bounds is any, we promote the other bound to
        # any as well, since otherwise the type range does not make sense.
        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.a),
                           self.subc(self.fx.t, self.fx.anyt)],
                          [(self.fx.anyt, self.fx.anyt)])

        self.assert_solve([self.fx.t.id],
                          [self.supc(self.fx.t, self.fx.anyt),
                           self.subc(self.fx.t, self.fx.a)],
                          [(self.fx.anyt, self.fx.anyt)])

    def assert_solve(self,
                     vars: List[TypeVarId],
                     constraints: List[Constraint],
                     results: List[Union[Type, Tuple[Type, Type]]],
                     ) -> None:
        """Solve the constraints and compare against the expected results.

        Expected tuples are reduced to their first element before the
        comparison; the check is done on string representations.
        """
        res = []
        for r in results:
            if isinstance(r, tuple):
                res.append(r[0])
            else:
                res.append(r)
        actual = solve_constraints(vars, constraints)
        assert_equal(str(actual), str(res))

    def supc(self, type_var: TypeVarType, bound: Type) -> Constraint:
        """Create a 'type_var is a supertype of bound' constraint."""
        return Constraint(type_var.id, SUPERTYPE_OF, bound)

    def subc(self, type_var: TypeVarType, bound: Type) -> Constraint:
        """Create a 'type_var is a subtype of bound' constraint."""
        return Constraint(type_var.id, SUBTYPE_OF, bound)
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
new file mode 100644
index 0000000..0be61ca
--- /dev/null
+++ b/mypy/test/teststubgen.py
@@ -0,0 +1,186 @@
+import glob
+import importlib
+import os.path
+import random
+import shutil
+import sys
+import tempfile
+import time
+
+from typing import List, Tuple
+
+from mypy.myunit import Suite, AssertionFailure, assert_equal
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test import config
+from mypy.parse import parse
+from mypy.errors import CompileError
+from mypy.stubgen import generate_stub, generate_stub_for_module
+from mypy.stubgenc import infer_method_sig
+from mypy.stubutil import (
+    parse_signature, parse_all_signatures, build_signature, find_unique_signatures,
+    infer_sig_from_docstring
+)
+
+
class StubgenUtilSuite(Suite):
    """Unit tests for the stubgen signature-parsing helpers in stubutil."""

    def test_parse_signature(self) -> None:
        self.assert_parse_signature('func()', ('func', [], []))

    def test_parse_signature_with_args(self) -> None:
        self.assert_parse_signature('func(arg)', ('func', ['arg'], []))
        self.assert_parse_signature('do(arg, arg2)', ('do', ['arg', 'arg2'], []))

    def test_parse_signature_with_optional_args(self) -> None:
        # Bracketed arguments (docstring convention) are optional.
        self.assert_parse_signature('func([arg])', ('func', [], ['arg']))
        self.assert_parse_signature('func(arg[, arg2])', ('func', ['arg'], ['arg2']))
        self.assert_parse_signature('func([arg[, arg2]])', ('func', [], ['arg', 'arg2']))

    def test_parse_signature_with_default_arg(self) -> None:
        # Arguments with defaults are also treated as optional.
        self.assert_parse_signature('func(arg=None)', ('func', [], ['arg']))
        self.assert_parse_signature('func(arg, arg2=None)', ('func', ['arg'], ['arg2']))
        self.assert_parse_signature('func(arg=1, arg2="")', ('func', [], ['arg', 'arg2']))

    def test_parse_signature_with_qualified_function(self) -> None:
        # The class prefix is dropped; only the method name is kept.
        self.assert_parse_signature('ClassName.func(arg)', ('func', ['arg'], []))

    def test_parse_signature_with_kw_only_arg(self) -> None:
        self.assert_parse_signature('ClassName.func(arg, *, arg2=1)',
                                    ('func', ['arg', '*'], ['arg2']))

    def test_parse_signature_with_star_arg(self) -> None:
        self.assert_parse_signature('ClassName.func(arg, *args)',
                                    ('func', ['arg', '*args'], []))

    def test_parse_signature_with_star_star_arg(self) -> None:
        self.assert_parse_signature('ClassName.func(arg, **args)',
                                    ('func', ['arg', '**args'], []))

    def assert_parse_signature(self, sig: str, result: Tuple[str, List[str], List[str]]) -> None:
        # result is (name, required args, optional args).
        assert_equal(parse_signature(sig), result)

    def test_build_signature(self) -> None:
        assert_equal(build_signature([], []), '()')
        assert_equal(build_signature(['arg'], []), '(arg)')
        assert_equal(build_signature(['arg', 'arg2'], []), '(arg, arg2)')
        assert_equal(build_signature(['arg'], ['arg2']), '(arg, arg2=...)')
        assert_equal(build_signature(['arg'], ['arg2', '**x']), '(arg, arg2=..., **x)')

    def test_parse_all_signatures(self) -> None:
        # Malformed signatures (unbalanced parens) are silently skipped.
        assert_equal(parse_all_signatures(['random text',
                                           '.. function:: fn(arg',
                                           '.. function:: fn()',
                                           '  .. method:: fn2(arg)']),
                     ([('fn', '()'),
                       ('fn2', '(arg)')], []))

    def test_find_unique_signatures(self) -> None:
        # Names with conflicting signatures (func2) are excluded.
        assert_equal(find_unique_signatures(
            [('func', '()'),
             ('func', '()'),
             ('func2', '()'),
             ('func2', '(arg)'),
             ('func3', '(arg, arg2)')]),
            [('func', '()'),
             ('func3', '(arg, arg2)')])

    def test_infer_sig_from_docstring(self) -> None:
        assert_equal(infer_sig_from_docstring('\nfunc(x) - y', 'func'), '(x)')
        assert_equal(infer_sig_from_docstring('\nfunc(x, Y_a=None)', 'func'), '(x, Y_a=None)')
        assert_equal(infer_sig_from_docstring('\nafunc(x) - y', 'func'), None)
        assert_equal(infer_sig_from_docstring('\nfunc(x, y', 'func'), None)
        assert_equal(infer_sig_from_docstring('\nfunc(x=z(y))', 'func'), None)
        assert_equal(infer_sig_from_docstring('\nfunc x', 'func'), None)
+
+
class StubgenPythonSuite(Suite):
    """Data-driven stub generation tests for Python source input."""

    # Files containing the test case descriptions.
    test_data_files = ['stubgen.test']

    def cases(self) -> List[DataDrivenTestCase]:
        cases = []  # type: List[DataDrivenTestCase]
        for name in self.test_data_files:
            full_path = os.path.join(config.test_data_prefix, name)
            cases += parse_test_cases(full_path, test_stubgen)
        return cases
+
+
def test_stubgen(testcase: DataDrivenTestCase) -> None:
    """Run a single data-driven stub generation test case.

    The input program is written to a temporary module under
    'stubgen-test-path', stubs are generated into '_out', and the
    generated stub text is compared with the expected output.
    """
    if 'stubgen-test-path' not in sys.path:
        sys.path.insert(0, 'stubgen-test-path')
    os.mkdir('stubgen-test-path')
    source = '\n'.join(testcase.input)
    handle = tempfile.NamedTemporaryFile(prefix='prog_', suffix='.py', dir='stubgen-test-path',
                                         delete=False)
    assert os.path.isabs(handle.name)
    path = os.path.basename(handle.name)
    name = path[:-3]  # module name: the file name minus its '.py' suffix
    path = os.path.join('stubgen-test-path', path)
    out_dir = '_out'
    os.mkdir(out_dir)
    try:
        handle.write(bytes(source, 'ascii'))
        handle.close()
        # Without this we may sometimes be unable to import the module below, as importlib
        # caches os.listdir() results in Python 3.3+ (Guido explained this to me).
        reset_importlib_caches()
        try:
            # The test case name suffix selects the stubgen mode to exercise.
            if testcase.name.endswith('_import'):
                generate_stub_for_module(name, out_dir, quiet=True)
            elif testcase.name.endswith('_fast_parser'):
                generate_stub(path, out_dir, fast_parser=True)
            else:
                generate_stub(path, out_dir)
            a = load_output(out_dir)
        except CompileError as e:
            a = e.messages
        assert_string_arrays_equal(testcase.output, a,
                                   'Invalid output ({}, line {})'.format(
                                       testcase.file, testcase.line))
    finally:
        handle.close()
        os.unlink(handle.name)
        shutil.rmtree(out_dir)
+
+
def reset_importlib_caches() -> None:
    """Ask importlib to drop its cached directory listings.

    Without this, importlib (Python 3.3+) may have stale os.listdir()
    caches and fail to see a freshly written module. The function is
    missing on some versions; in that case there is nothing to reset
    and the failure is ignored.
    """
    try:
        invalidate = importlib.invalidate_caches
        invalidate()
    except (ImportError, AttributeError):
        pass
+
+
def load_output(dirname: str) -> List[str]:
    """Read the generated stub file(s) in dirname into a list of lines.

    A single output file is returned as-is; when there are several,
    each file's lines are preceded by a '## <path> ##' header line.
    """
    entries = glob.glob('%s/*' % dirname)
    assert entries, 'No files generated'
    lines = []  # type: List[str]
    if len(entries) == 1:
        add_file(entries[0], lines)
        return lines
    for entry in entries:
        lines.append('## %s ##' % entry)
        add_file(entry, lines)
    return lines
+
+
def add_file(path: str, result: List[str]) -> None:
    """Append the lines of the text file at path to result (in place)."""
    with open(path) as f:
        for line in f.read().splitlines():
            result.append(line)
+
+
class StubgencSuite(Suite):
    """Unit tests for signature inference for C extension stubs."""

    def test_infer_hash_sig(self) -> None:
        assert_equal(infer_method_sig('__hash__'), '()')

    def test_infer_getitem_sig(self) -> None:
        assert_equal(infer_method_sig('__getitem__'), '(index)')

    def test_infer_setitem_sig(self) -> None:
        assert_equal(infer_method_sig('__setitem__'), '(index, object)')

    def test_infer_binary_op_sig(self) -> None:
        binary_ops = ['eq', 'ne', 'lt', 'le', 'gt', 'ge',
                      'add', 'radd', 'sub', 'rsub', 'mul', 'rmul']
        for name in binary_ops:
            assert_equal(infer_method_sig('__{}__'.format(name)), '(other)')

    def test_infer_unary_op_sig(self) -> None:
        for name in ['neg', 'pos']:
            assert_equal(infer_method_sig('__{}__'.format(name)), '()')
diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py
new file mode 100644
index 0000000..307cb7a
--- /dev/null
+++ b/mypy/test/testsubtypes.py
@@ -0,0 +1,208 @@
+from mypy.myunit import Suite, assert_true
+from mypy.nodes import CONTRAVARIANT, INVARIANT, COVARIANT
+from mypy.subtypes import is_subtype
+from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+from mypy.types import Type
+
+
class SubtypingSuite(Suite):
    """Test cases for mypy.subtypes.is_subtype() over fixture types."""

    def set_up(self) -> None:
        # One fixture per type-variable variance; the fixture attributes
        # (a, b, o, ga, gsab, ...) are defined by TypeFixture.
        self.fx = TypeFixture(INVARIANT)
        self.fx_contra = TypeFixture(CONTRAVARIANT)
        self.fx_co = TypeFixture(COVARIANT)

    def test_trivial_cases(self) -> None:
        # Every type is a subtype of itself (reflexivity).
        for simple in self.fx_co.void, self.fx_co.a, self.fx_co.o, self.fx_co.b:
            self.assert_subtype(simple, simple)

    def test_instance_subtyping(self) -> None:
        self.assert_strict_subtype(self.fx.a, self.fx.o)
        self.assert_strict_subtype(self.fx.b, self.fx.o)
        self.assert_strict_subtype(self.fx.b, self.fx.a)

        self.assert_not_subtype(self.fx.a, self.fx.d)
        self.assert_not_subtype(self.fx.b, self.fx.c)

    def test_simple_generic_instance_subtyping_invariant(self) -> None:
        self.assert_subtype(self.fx.ga, self.fx.ga)
        self.assert_subtype(self.fx.hab, self.fx.hab)

        # With invariance, G[A] and G[B] are unrelated even if B <: A.
        self.assert_not_subtype(self.fx.ga, self.fx.g2a)
        self.assert_not_subtype(self.fx.ga, self.fx.gb)
        self.assert_not_subtype(self.fx.gb, self.fx.ga)

    def test_simple_generic_instance_subtyping_covariant(self) -> None:
        self.assert_subtype(self.fx_co.ga, self.fx_co.ga)
        self.assert_subtype(self.fx_co.hab, self.fx_co.hab)

        self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a)
        self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb)
        self.assert_subtype(self.fx_co.gb, self.fx_co.ga)

    def test_simple_generic_instance_subtyping_contravariant(self) -> None:
        self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga)
        self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab)

        self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a)
        self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb)
        self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga)

    def test_generic_subtyping_with_inheritance_invariant(self) -> None:
        self.assert_subtype(self.fx.gsab, self.fx.gb)
        self.assert_not_subtype(self.fx.gsab, self.fx.ga)
        self.assert_not_subtype(self.fx.gsaa, self.fx.gb)

    def test_generic_subtyping_with_inheritance_covariant(self) -> None:
        self.assert_subtype(self.fx_co.gsab, self.fx_co.gb)
        self.assert_subtype(self.fx_co.gsab, self.fx_co.ga)
        self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb)

    def test_generic_subtyping_with_inheritance_contravariant(self) -> None:
        self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb)
        self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga)
        self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb)

    def test_interface_subtyping(self) -> None:
        self.assert_subtype(self.fx.e, self.fx.f)
        self.assert_equivalent(self.fx.f, self.fx.f)
        self.assert_not_subtype(self.fx.a, self.fx.f)

    def test_generic_interface_subtyping(self) -> None:
        # TODO make this work
        self.skip()

        fx2 = InterfaceTypeFixture()

        self.assert_subtype(fx2.m1, fx2.gfa)
        self.assert_not_subtype(fx2.m1, fx2.gfb)

        self.assert_equivalent(fx2.gfa, fx2.gfa)

    def test_basic_callable_subtyping(self) -> None:
        # Callables are contravariant in arguments, covariant in return.
        self.assert_strict_subtype(self.fx.callable(self.fx.o, self.fx.d),
                                   self.fx.callable(self.fx.a, self.fx.d))
        self.assert_strict_subtype(self.fx.callable(self.fx.d, self.fx.b),
                                   self.fx.callable(self.fx.d, self.fx.a))

        self.assert_unrelated(self.fx.callable(self.fx.a, self.fx.a),
                              self.fx.callable(self.fx.a, self.fx.void))

        self.assert_unrelated(
            self.fx.callable(self.fx.a, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a))

    def test_default_arg_callable_subtyping(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.d, self.fx.a))

        self.assert_strict_subtype(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a))

        self.assert_strict_subtype(
            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a))

        self.assert_unrelated(
            self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.d, self.fx.d, self.fx.a))

        self.assert_unrelated(
            self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a))

        self.assert_unrelated(
            self.fx.callable_default(1, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a, self.fx.a, self.fx.a))

    def test_var_arg_callable_subtyping_1(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_2(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_3(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a),
            self.fx.callable(self.fx.a))

    def test_var_arg_callable_subtyping_4(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_5(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a),
            self.fx.callable(self.fx.b, self.fx.a))

    def test_var_arg_callable_subtyping_6(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d))

    def test_var_arg_callable_subtyping_7(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
            self.fx.callable(self.fx.a, self.fx.d))

    def test_var_arg_callable_subtyping_8(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d))
        self.assert_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d))

    def test_var_arg_callable_subtyping_9(self) -> None:
        self.assert_not_subtype(
            self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.a, self.fx.d))
        self.assert_subtype(
            self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d),
            self.fx.callable_var_arg(0, self.fx.b, self.fx.d))

    def test_type_callable_subtyping(self) -> None:
        self.assert_strict_subtype(
            self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type)

        self.assert_strict_subtype(
            self.fx.callable_type(self.fx.d, self.fx.b),
            self.fx.callable(self.fx.d, self.fx.a))

        self.assert_strict_subtype(self.fx.callable_type(self.fx.a, self.fx.b),
                                   self.fx.callable(self.fx.a, self.fx.b))

    # IDEA: Maybe add these test cases (they are tested pretty well in type
    #       checker tests already):
    #  * more interface subtyping test cases
    #  * more generic interface subtyping test cases
    #  * type variables
    #  * tuple types
    #  * void type
    #  * None type
    #  * any type
    #  * generic function types

    # Assertion helpers.

    # s <: t
    def assert_subtype(self, s: Type, t: Type) -> None:
        assert_true(is_subtype(s, t), '{} not subtype of {}'.format(s, t))

    # not (s <: t)
    def assert_not_subtype(self, s: Type, t: Type) -> None:
        assert_true(not is_subtype(s, t), '{} subtype of {}'.format(s, t))

    # s <: t but not t <: s
    def assert_strict_subtype(self, s: Type, t: Type) -> None:
        self.assert_subtype(s, t)
        self.assert_not_subtype(t, s)

    # s <: t and t <: s
    def assert_equivalent(self, s: Type, t: Type) -> None:
        self.assert_subtype(s, t)
        self.assert_subtype(t, s)

    # Neither direction holds.
    def assert_unrelated(self, s: Type, t: Type) -> None:
        self.assert_not_subtype(s, t)
        self.assert_not_subtype(t, s)
diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py
new file mode 100644
index 0000000..1dac308
--- /dev/null
+++ b/mypy/test/testtransform.py
@@ -0,0 +1,85 @@
+"""Identity AST transform test cases"""
+
+import os.path
+
+from typing import Dict, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test.helpers import assert_string_arrays_equal, testfile_pyversion
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.config import test_data_prefix, test_temp_dir
+from mypy.errors import CompileError
+from mypy.nodes import TypeInfo
+from mypy.treetransform import TransformVisitor
+from mypy.types import Type
+from mypy.options import Options
+
+
class TransformSuite(Suite):
    """Driver that runs the identity-transform check over the semantic
    analysis test case files.
    """

    # Reuse semantic analysis test cases.
    transform_files = ['semanal-basic.test',
                       'semanal-expressions.test',
                       'semanal-classes.test',
                       'semanal-types.test',
                       'semanal-modules.test',
                       'semanal-statements.test',
                       'semanal-abstractclasses.test',
                       'semanal-python2.test']

    def cases(self) -> List[DataDrivenTestCase]:
        """Parse every listed file into data-driven test cases."""
        cases = []  # type: List[DataDrivenTestCase]
        for fnam in self.transform_files:
            path = os.path.join(test_data_prefix, fnam)
            cases.extend(parse_test_cases(path,
                                          test_transform,
                                          base_path=test_temp_dir,
                                          native_sep=True))
        return cases
+
+
def test_transform(testcase: DataDrivenTestCase) -> None:
    """Perform an identity transform test case."""

    try:
        src = '\n'.join(testcase.input)
        # Run only semantic analysis on the test program, against the
        # lightweight builtins fixtures.
        options = Options()
        options.use_builtins_fixtures = True
        options.semantic_analysis_only = True
        options.show_traceback = True
        options.python_version = testfile_pyversion(testcase.file)
        result = build.build(sources=[BuildSource('main', None, src)],
                             options=options,
                             alt_lib_path=test_temp_dir)
        a = result.errors
        if a:
            raise CompileError(a)
        # Include string representations of the source files in the actual
        # output.
        for fnam in sorted(result.files.keys()):
            f = result.files[fnam]

            # Omit the builtins module and files with a special marker in the
            # path.
            # TODO the test is not reliable
            if (not f.path.endswith((os.sep + 'builtins.pyi',
                                     'typing.pyi',
                                     'abc.pyi'))
                    and not os.path.basename(f.path).startswith('_')
                    and not os.path.splitext(
                        os.path.basename(f.path))[0].endswith('_')):
                # Apply the identity transform and render the result.
                t = TestTransformVisitor()
                f = t.mypyfile(f)
                a += str(f).split('\n')
    except CompileError as e:
        # Expected compile errors become the actual output.
        a = e.messages
    assert_string_arrays_equal(
        testcase.output, a,
        'Invalid semantic analyzer output ({}, line {})'.format(testcase.file,
                                                                testcase.line))
+
+
class TestTransformVisitor(TransformVisitor):
    """Identity transform visitor that asserts every type is present."""

    # NOTE: the parameter deliberately keeps the base class's name even
    # though it shadows the builtin 'type'.
    def type(self, type: Type) -> Type:
        assert type is not None
        return type
diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py
new file mode 100644
index 0000000..a465c71
--- /dev/null
+++ b/mypy/test/testtypegen.py
@@ -0,0 +1,128 @@
+"""Test cases for the type checker: exporting inferred types"""
+
+import os.path
+import re
+
+from typing import Set, List
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.myunit import Suite
+from mypy.test import config
+from mypy.test.data import parse_test_cases, DataDrivenTestCase
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.util import short_type
+from mypy.nodes import (
+    NameExpr, TypeVarExpr, CallExpr, Expression, MypyFile, AssignmentStmt, IntExpr
+)
+from mypy.traverser import TraverserVisitor
+from mypy.errors import CompileError
+from mypy.options import Options
+
+
class TypeExportSuite(Suite):
    """Test cases for exporting inferred types from the type checker."""

    # List of files that contain test case descriptions.
    files = ['typexport-basic.test']

    def cases(self) -> List[DataDrivenTestCase]:
        c = []  # type: List[DataDrivenTestCase]
        for f in self.files:
            c += parse_test_cases(os.path.join(config.test_data_prefix, f),
                                  self.run_test, config.test_temp_dir)
        return c

    def run_test(self, testcase: DataDrivenTestCase) -> None:
        try:
            # A first input line of the form '## <regexp>' restricts the
            # output to nodes whose short type name (or NameExpr name)
            # matches the regexp.
            line = testcase.input[0]
            mask = ''
            if line.startswith('##'):
                mask = '(' + line[2:].strip() + ')$'

            src = '\n'.join(testcase.input)
            options = Options()
            options.use_builtins_fixtures = True
            options.show_traceback = True
            result = build.build(sources=[BuildSource('main', None, src)],
                                 options=options,
                                 alt_lib_path=config.test_temp_dir)
            a = result.errors
            map = result.types
            nodes = map.keys()

            # Ignore NameExpr nodes of variables with explicit (trivial) types
            # to simplify output.
            searcher = SkippedNodeSearcher()
            for file in result.files.values():
                file.accept(searcher)
            ignored = searcher.nodes

            # Filter nodes that should be included in the output.
            keys = []
            for node in nodes:
                if node.line is not None and node.line != -1 and map[node]:
                    if ignore_node(node) or node in ignored:
                        continue
                    if (re.match(mask, short_type(node))
                            or (isinstance(node, NameExpr)
                                and re.match(mask, node.name))):
                        # Include node in output.
                        keys.append(node)

            # Sort for deterministic output: by line, then node kind, then
            # textual representation.
            for key in sorted(keys,
                              key=lambda n: (n.line, short_type(n),
                                             str(n) + str(map[n]))):
                ts = str(map[key]).replace('*', '')  # Remove erased tags
                ts = ts.replace('__main__.', '')
                a.append('{}({}) : {}'.format(short_type(key), key.line, ts))
        except CompileError as e:
            a = e.messages
        assert_string_arrays_equal(
            testcase.output, a,
            'Invalid type checker output ({}, line {})'.format(testcase.file,
                                                               testcase.line))
+
+
class SkippedNodeSearcher(TraverserVisitor):
    """Collect expression nodes whose types should be left out of the
    test case output.
    """

    def __init__(self) -> None:
        self.nodes = set()  # type: Set[Expression]
        # True while traversing the 'typing' module.
        self.is_typing = False

    def visit_mypy_file(self, f: MypyFile) -> None:
        self.is_typing = (f.fullname() == 'typing')
        super().visit_mypy_file(f)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        # Skip name lvalues of assignments that have an explicit type or
        # an ignorable rvalue.
        if s.type or ignore_node(s.rvalue):
            self.nodes.update(lvalue for lvalue in s.lvalues
                              if isinstance(lvalue, NameExpr))
        super().visit_assignment_stmt(s)

    def visit_name_expr(self, n: NameExpr) -> None:
        self.skip_if_typing(n)

    def visit_int_expr(self, n: IntExpr) -> None:
        self.skip_if_typing(n)

    def skip_if_typing(self, n: Expression) -> None:
        if self.is_typing:
            self.nodes.add(n)
+
+
def ignore_node(node: Expression) -> bool:
    """Return True if node is to be omitted from test case output.

    We want to drop object() expressions coming from the typing module
    stub, as well as TypeVar(...) expressions. Since detecting whether a
    node comes from the typing module is not easy, we simply strip all
    of them away.
    """
    if isinstance(node, TypeVarExpr):
        return True
    if isinstance(node, NameExpr):
        if node.fullname in ('builtins.object', 'builtins.None'):
            return True
    elif isinstance(node, CallExpr):
        if node.analyzed or ignore_node(node.callee):
            return True
    return False
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
new file mode 100644
index 0000000..4364848
--- /dev/null
+++ b/mypy/test/testtypes.py
@@ -0,0 +1,847 @@
+"""Test cases for mypy types and type operations."""
+
+from typing import List, Tuple
+
+from mypy.myunit import (
+    Suite, assert_equal, assert_true, assert_false, assert_type
+)
+from mypy.erasetype import erase_type
+from mypy.expandtype import expand_type
+from mypy.join import join_types, join_simple
+from mypy.meet import meet_types
+from mypy.types import (
+    UnboundType, AnyType, Void, CallableType, TupleType, TypeVarDef, Type,
+    Instance, NoneTyp, ErrorType, Overloaded, TypeType, UnionType, UninhabitedType,
+    true_only, false_only, TypeVarId
+)
+from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, CONTRAVARIANT, INVARIANT, COVARIANT
+from mypy.subtypes import is_subtype, is_more_precise, is_proper_subtype
+from mypy.typefixture import TypeFixture, InterfaceTypeFixture
+
+
class TypesSuite(Suite):
    """Tests for constructing types and their string representations."""

    def __init__(self) -> None:
        super().__init__()
        # Shared helper types used across the test cases.
        self.x = UnboundType('X')  # Helpers
        self.y = UnboundType('Y')
        self.fx = TypeFixture()
        self.function = self.fx.function

    def test_any(self) -> None:
        assert_equal(str(AnyType()), 'Any')

    def test_simple_unbound_type(self) -> None:
        assert_equal(str(UnboundType('Foo')), 'Foo?')

    def test_generic_unbound_type(self) -> None:
        generic = UnboundType('Foo', [UnboundType('T'), AnyType()])
        assert_equal(str(generic), 'Foo?[T?, Any]')

    def test_void_type(self) -> None:
        assert_equal(str(Void(None)), 'void')

    def test_callable_type(self) -> None:
        callee = CallableType([self.x, self.y],
                              [ARG_POS, ARG_POS],
                              [None, None],
                              AnyType(),
                              self.function)
        assert_equal(str(callee), 'def (X?, Y?) -> Any')

        nullary = CallableType([], [], [], Void(None), None)
        assert_equal(str(nullary), 'def ()')

    def test_callable_type_with_default_args(self) -> None:
        one_default = CallableType([self.x, self.y], [ARG_POS, ARG_OPT],
                                   [None, None], AnyType(), self.function)
        assert_equal(str(one_default), 'def (X?, Y? =) -> Any')

        all_default = CallableType([self.x, self.y], [ARG_OPT, ARG_OPT],
                                   [None, None], AnyType(), self.function)
        assert_equal(str(all_default), 'def (X? =, Y? =) -> Any')

    def test_callable_type_with_var_args(self) -> None:
        star_only = CallableType([self.x], [ARG_STAR], [None], AnyType(),
                                 self.function)
        assert_equal(str(star_only), 'def (*X?) -> Any')

        pos_and_star = CallableType([self.x, self.y], [ARG_POS, ARG_STAR],
                                    [None, None], AnyType(), self.function)
        assert_equal(str(pos_and_star), 'def (X?, *Y?) -> Any')

        opt_and_star = CallableType([self.x, self.y], [ARG_OPT, ARG_STAR],
                                    [None, None], AnyType(), self.function)
        assert_equal(str(opt_and_star), 'def (X? =, *Y?) -> Any')

    def test_tuple_type(self) -> None:
        assert_equal(str(TupleType([], None)), 'Tuple[]')
        assert_equal(str(TupleType([self.x], None)), 'Tuple[X?]')
        assert_equal(str(TupleType([self.x, AnyType()], None)),
                     'Tuple[X?, Any]')

    def test_type_variable_binding(self) -> None:
        assert_equal(str(TypeVarDef('X', 1, None, self.fx.o)), 'X')
        assert_equal(str(TypeVarDef('X', 1, [self.x, self.y], self.fx.o)),
                     'X in (X?, Y?)')

    def test_generic_function_type(self) -> None:
        generic = CallableType([self.x, self.y], [ARG_POS, ARG_POS],
                               [None, None], self.y, self.function, name=None,
                               variables=[TypeVarDef('X', -1, None, self.fx.o)])
        assert_equal(str(generic), 'def [X] (X?, Y?) -> Y?')

        type_vars = [TypeVarDef('Y', -1, None, self.fx.o),
                     TypeVarDef('X', -2, None, self.fx.o)]
        nullary = CallableType([], [], [], Void(None), self.function,
                               name=None, variables=type_vars)
        assert_equal(str(nullary), 'def [Y, X] ()')
+
+
class TypeOpsSuite(Suite):
    """Test cases for type operations: expand_type, erase_type,
    is_more_precise, is_proper_subtype, and true/false narrowing.
    """

    def set_up(self) -> None:
        # One fixture per type-variable variance.
        self.fx = TypeFixture(INVARIANT)
        self.fx_co = TypeFixture(COVARIANT)
        self.fx_contra = TypeFixture(CONTRAVARIANT)

    # expand_type

    def test_trivial_expand(self) -> None:
        for t in (self.fx.a, self.fx.o, self.fx.t, self.fx.void, self.fx.nonet,
                  self.tuple(self.fx.a),
                  self.callable([], self.fx.a, self.fx.a), self.fx.anyt):
            # Expanding with an empty map must be an identity.
            # NOTE(review): repeated three times, presumably to check that
            # expansion does not mutate its input -- confirm.
            self.assert_expand(t, [], t)
            self.assert_expand(t, [], t)
            self.assert_expand(t, [], t)

    def test_expand_naked_type_var(self) -> None:
        self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a)
        self.assert_expand(self.fx.t, [(self.fx.s.id, self.fx.a)], self.fx.t)

    def test_expand_basic_generic_types(self) -> None:
        self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga)

    # IDEA: Add test cases for
    #   tuple types
    #   callable types
    #   multiple arguments

    def assert_expand(self,
                      orig: Type,
                      map_items: List[Tuple[TypeVarId, Type]],
                      result: Type,
                      ) -> None:
        """Check that expanding orig with the given type-variable map
        produces result (compared by string representation)."""
        lower_bounds = {}

        for id, t in map_items:
            lower_bounds[id] = t

        exp = expand_type(orig, lower_bounds)
        # Remove erased tags (asterisks).
        assert_equal(str(exp).replace('*', ''), str(result))

    # erase_type

    def test_trivial_erase(self) -> None:
        for t in (self.fx.a, self.fx.o, self.fx.void, self.fx.nonet,
                  self.fx.anyt, self.fx.err):
            self.assert_erase(t, t)

    def test_erase_with_type_variable(self) -> None:
        self.assert_erase(self.fx.t, self.fx.anyt)

    def test_erase_with_generic_type(self) -> None:
        self.assert_erase(self.fx.ga, self.fx.gdyn)
        self.assert_erase(self.fx.hab,
                          Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt]))

    def test_erase_with_tuple_type(self) -> None:
        self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple)

    def test_erase_with_function_type(self) -> None:
        self.assert_erase(self.fx.callable(self.fx.a, self.fx.b),
                          self.fx.callable_type(self.fx.void))

    def test_erase_with_type_object(self) -> None:
        self.assert_erase(self.fx.callable_type(self.fx.a, self.fx.b),
                          self.fx.callable_type(self.fx.void))

    def test_erase_with_type_type(self) -> None:
        self.assert_erase(self.fx.type_a, self.fx.type_a)
        self.assert_erase(self.fx.type_t, self.fx.type_any)

    def assert_erase(self, orig: Type, result: Type) -> None:
        # Compare by string representation.
        assert_equal(str(erase_type(orig)), str(result))

    # is_more_precise

    def test_is_more_precise(self) -> None:
        fx = self.fx
        assert_true(is_more_precise(fx.b, fx.a))
        assert_true(is_more_precise(fx.b, fx.b))
        assert_true(is_more_precise(fx.b, fx.b))
        assert_true(is_more_precise(fx.b, fx.anyt))
        assert_true(is_more_precise(self.tuple(fx.b, fx.a),
                                    self.tuple(fx.b, fx.a)))

        assert_false(is_more_precise(fx.a, fx.b))
        assert_false(is_more_precise(fx.anyt, fx.b))
        assert_false(is_more_precise(self.tuple(fx.b, fx.b),
                                     self.tuple(fx.b, fx.a)))

    # is_proper_subtype

    def test_is_proper_subtype(self) -> None:
        fx = self.fx

        assert_true(is_proper_subtype(fx.a, fx.a))
        assert_true(is_proper_subtype(fx.b, fx.a))
        assert_true(is_proper_subtype(fx.b, fx.o))
        assert_true(is_proper_subtype(fx.b, fx.o))

        assert_false(is_proper_subtype(fx.a, fx.b))
        assert_false(is_proper_subtype(fx.o, fx.b))

        # Unlike is_subtype, Any is only a proper subtype of itself.
        assert_true(is_proper_subtype(fx.anyt, fx.anyt))
        assert_false(is_proper_subtype(fx.a, fx.anyt))
        assert_false(is_proper_subtype(fx.anyt, fx.a))

        assert_true(is_proper_subtype(fx.ga, fx.ga))
        assert_true(is_proper_subtype(fx.gdyn, fx.gdyn))
        assert_false(is_proper_subtype(fx.ga, fx.gdyn))
        assert_false(is_proper_subtype(fx.gdyn, fx.ga))

        assert_true(is_proper_subtype(fx.t, fx.t))
        assert_false(is_proper_subtype(fx.t, fx.s))

    def test_is_proper_subtype_covariance(self) -> None:
        fx_co = self.fx_co

        assert_true(is_proper_subtype(fx_co.gsab, fx_co.gb))
        assert_true(is_proper_subtype(fx_co.gsab, fx_co.ga))
        assert_false(is_proper_subtype(fx_co.gsaa, fx_co.gb))
        assert_true(is_proper_subtype(fx_co.gb, fx_co.ga))
        assert_false(is_proper_subtype(fx_co.ga, fx_co.gb))

    def test_is_proper_subtype_contravariance(self) -> None:
        fx_contra = self.fx_contra

        assert_true(is_proper_subtype(fx_contra.gsab, fx_contra.gb))
        assert_false(is_proper_subtype(fx_contra.gsab, fx_contra.ga))
        assert_true(is_proper_subtype(fx_contra.gsaa, fx_contra.gb))
        assert_false(is_proper_subtype(fx_contra.gb, fx_contra.ga))
        assert_true(is_proper_subtype(fx_contra.ga, fx_contra.gb))

    def test_is_proper_subtype_invariance(self) -> None:
        fx = self.fx

        assert_true(is_proper_subtype(fx.gsab, fx.gb))
        assert_false(is_proper_subtype(fx.gsab, fx.ga))
        assert_false(is_proper_subtype(fx.gsaa, fx.gb))
        assert_false(is_proper_subtype(fx.gb, fx.ga))
        assert_false(is_proper_subtype(fx.ga, fx.gb))

    # can_be_true / can_be_false

    def test_empty_tuple_always_false(self) -> None:
        tuple_type = self.tuple()
        assert_true(tuple_type.can_be_false)
        assert_false(tuple_type.can_be_true)

    def test_nonempty_tuple_always_true(self) -> None:
        tuple_type = self.tuple(AnyType(), AnyType())
        assert_true(tuple_type.can_be_true)
        assert_false(tuple_type.can_be_false)

    def test_union_can_be_true_if_any_true(self) -> None:
        union_type = UnionType([self.fx.a, self.tuple()])
        assert_true(union_type.can_be_true)

    def test_union_can_not_be_true_if_none_true(self) -> None:
        union_type = UnionType([self.tuple(), self.tuple()])
        assert_false(union_type.can_be_true)

    def test_union_can_be_false_if_any_false(self) -> None:
        union_type = UnionType([self.fx.a, self.tuple()])
        assert_true(union_type.can_be_false)

    def test_union_can_not_be_false_if_none_false(self) -> None:
        union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)])
        assert_false(union_type.can_be_false)

    # true_only / false_only

    def test_true_only_of_false_type_is_uninhabited(self) -> None:
        to = true_only(NoneTyp())
        assert_type(UninhabitedType, to)

    def test_true_only_of_true_type_is_idempotent(self) -> None:
        always_true = self.tuple(AnyType())
        to = true_only(always_true)
        assert_true(always_true is to)

    def test_true_only_of_instance(self) -> None:
        to = true_only(self.fx.a)
        assert_equal(str(to), "A")
        assert_true(to.can_be_true)
        assert_false(to.can_be_false)
        assert_type(Instance, to)
        # The original class still can be false
        assert_true(self.fx.a.can_be_false)

    def test_true_only_of_union(self) -> None:
        tup_type = self.tuple(AnyType())
        # Union of something that is unknown, something that is always true, something
        # that is always false
        union_type = UnionType([self.fx.a, tup_type, self.tuple()])
        to = true_only(union_type)
        assert isinstance(to, UnionType)
        assert_equal(len(to.items), 2)
        assert_true(to.items[0].can_be_true)
        assert_false(to.items[0].can_be_false)
        assert_true(to.items[1] is tup_type)

    def test_false_only_of_true_type_is_uninhabited(self) -> None:
        fo = false_only(self.tuple(AnyType()))
        assert_type(UninhabitedType, fo)

    def test_false_only_of_false_type_is_idempotent(self) -> None:
        always_false = NoneTyp()
        fo = false_only(always_false)
        assert_true(always_false is fo)

    def test_false_only_of_instance(self) -> None:
        fo = false_only(self.fx.a)
        assert_equal(str(fo), "A")
        assert_false(fo.can_be_true)
        assert_true(fo.can_be_false)
        assert_type(Instance, fo)
        # The original class still can be true
        assert_true(self.fx.a.can_be_true)

    def test_false_only_of_union(self) -> None:
        tup_type = self.tuple()
        # Union of something that is unknown, something that is always true, something
        # that is always false
        union_type = UnionType([self.fx.a, self.tuple(AnyType()), tup_type])
        assert_equal(len(union_type.items), 3)
        fo = false_only(union_type)
        assert isinstance(fo, UnionType)
        assert_equal(len(fo.items), 2)
        assert_false(fo.items[0].can_be_true)
        assert_true(fo.items[0].can_be_false)
        assert_true(fo.items[1] is tup_type)

    # Helpers

    # NOTE: shadows builtins.tuple within the class body.
    def tuple(self, *a: Type) -> TupleType:
        return TupleType(list(a), self.fx.std_tuple)

    def callable(self, vars: List[str], *a: Type) -> CallableType:
        """callable(args, a1, ..., an, r) constructs a callable with
        argument types a1, ... an and return type r and type arguments
        vars.
        """
        tv = []  # type: List[TypeVarDef]
        n = -1
        for v in vars:
            tv.append(TypeVarDef(v, n, None, self.fx.o))
            n -= 1
        return CallableType(list(a[:-1]),
                            [ARG_POS] * (len(a) - 1),
                            [None] * (len(a) - 1),
                            a[-1],
                            self.fx.function,
                            name=None,
                            variables=tv)
+
+
+class JoinSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture()
+
+    def test_trivial_cases(self) -> None:
+        for simple in self.fx.void, self.fx.a, self.fx.o, self.fx.b:
+            self.assert_join(simple, simple, simple)
+
+    def test_class_subtyping(self) -> None:
+        self.assert_join(self.fx.a, self.fx.o, self.fx.o)
+        self.assert_join(self.fx.b, self.fx.o, self.fx.o)
+        self.assert_join(self.fx.a, self.fx.d, self.fx.o)
+        self.assert_join(self.fx.b, self.fx.c, self.fx.a)
+        self.assert_join(self.fx.b, self.fx.d, self.fx.o)
+
+    def test_tuples(self) -> None:
+        self.assert_join(self.tuple(), self.tuple(), self.tuple())
+        self.assert_join(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a),
+                         self.tuple(self.fx.a))
+        self.assert_join(self.tuple(self.fx.b, self.fx.c),
+                         self.tuple(self.fx.a, self.fx.d),
+                         self.tuple(self.fx.a, self.fx.o))
+
+        self.assert_join(self.tuple(self.fx.a, self.fx.a),
+                         self.fx.std_tuple,
+                         self.fx.o)
+        self.assert_join(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a, self.fx.a),
+                         self.fx.o)
+
+    def test_function_types(self) -> None:
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b))
+
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.b, self.fx.b),
+                         self.fx.function)
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.a),
+                         self.fx.function)
+        self.assert_join(self.callable(self.fx.a, self.fx.b),
+                         self.fx.function,
+                         self.fx.function)
+
+    def test_type_vars(self) -> None:
+        self.assert_join(self.fx.t, self.fx.t, self.fx.t)
+        self.assert_join(self.fx.s, self.fx.s, self.fx.s)
+        self.assert_join(self.fx.t, self.fx.s, self.fx.o)
+
+    def test_void(self) -> None:
+        self.assert_join(self.fx.void, self.fx.void, self.fx.void)
+        self.assert_join(self.fx.void, self.fx.anyt, self.fx.anyt)
+
+        # Join of any other type against void results in ErrorType, since there
+        # is no other meaningful result.
+        for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
+                  self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.void, self.fx.err)
+
+    def test_none(self) -> None:
+        # Any type t joined with None results in t.
+        for t in [NoneTyp(), self.fx.a, self.fx.o, UnboundType('x'),
+                  self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b), self.fx.anyt]:
+            self.assert_join(t, NoneTyp(), t)
+
+    def test_unbound_type(self) -> None:
+        self.assert_join(UnboundType('x'), UnboundType('x'), self.fx.anyt)
+        self.assert_join(UnboundType('x'), UnboundType('y'), self.fx.anyt)
+
+        # Any type t joined with an unbound type results in dynamic. Unbound
+        # type means that there is an error somewhere in the program, so this
+        # does not affect type safety (whatever the result).
+        for t in [self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, UnboundType('X'), self.fx.anyt)
+
+    def test_any_type(self) -> None:
+        # Join against 'Any' type always results in 'Any'.
+        for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
+                  UnboundType('x'), self.fx.void, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.anyt, self.fx.anyt)
+
+    def test_mixed_truth_restricted_type_simple(self) -> None:
+        # join_simple against differently restricted truthiness types drops restrictions.
+        true_a = true_only(self.fx.a)
+        false_o = false_only(self.fx.o)
+        j = join_simple(self.fx.o, true_a, false_o)
+        assert_true(j.can_be_true)
+        assert_true(j.can_be_false)
+
+    def test_mixed_truth_restricted_type(self) -> None:
+        # join_types against differently restricted truthiness types drops restrictions.
+        true_any = true_only(AnyType())
+        false_o = false_only(self.fx.o)
+        j = join_types(true_any, false_o)
+        assert_true(j.can_be_true)
+        assert_true(j.can_be_false)
+
+    def test_other_mixed_types(self) -> None:
+        # In general, joining unrelated types produces object.
+        for t1 in [self.fx.a, self.fx.t, self.tuple(),
+                   self.callable(self.fx.a, self.fx.b)]:
+            for t2 in [self.fx.a, self.fx.t, self.tuple(),
+                       self.callable(self.fx.a, self.fx.b)]:
+                if str(t1) != str(t2):
+                    self.assert_join(t1, t2, self.fx.o)
+
+    def test_error_type(self) -> None:
+        self.assert_join(self.fx.err, self.fx.anyt, self.fx.anyt)
+
+        # Join against any type except dynamic results in ErrorType.
+        for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
+                  self.fx.void, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.err, self.fx.err)
+
+    def test_simple_generics(self) -> None:
+        self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.gb, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.gd, self.fx.o)
+        self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o)
+
+        self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga)
+        self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt)
+
+        for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_join(t, self.fx.ga, self.fx.o)
+
+    def test_generics_with_multiple_args(self) -> None:
+        self.assert_join(self.fx.hab, self.fx.hab, self.fx.hab)
+        self.assert_join(self.fx.hab, self.fx.hbb, self.fx.hab)
+        self.assert_join(self.fx.had, self.fx.haa, self.fx.o)
+
+    def test_generics_with_inheritance(self) -> None:
+        self.assert_join(self.fx.gsab, self.fx.gb, self.fx.gb)
+        self.assert_join(self.fx.gsba, self.fx.gb, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gd, self.fx.o)
+
+    def test_generics_with_inheritance_and_shared_supertype(self) -> None:
+        self.assert_join(self.fx.gsba, self.fx.gs2a, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gs2a, self.fx.ga)
+        self.assert_join(self.fx.gsab, self.fx.gs2d, self.fx.o)
+
+    def test_generic_types_and_any(self) -> None:
+        self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn)
+
+    def test_callables_with_any(self) -> None:
+        self.assert_join(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
+                                       self.fx.a),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.a,
+                                       self.fx.anyt),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
+                                       self.fx.anyt))
+
+    def test_overloaded(self) -> None:
+        c = self.callable
+
+        def ov(*items: CallableType) -> Overloaded:
+            return Overloaded(list(items))
+
+        fx = self.fx
+        func = fx.function
+        c1 = c(fx.a, fx.a)
+        c2 = c(fx.b, fx.b)
+        c3 = c(fx.c, fx.c)
+        self.assert_join(ov(c1, c2), c1, c1)
+        self.assert_join(ov(c1, c2), c2, c2)
+        self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2))
+        self.assert_join(ov(c1, c2), ov(c1, c3), c1)
+        self.assert_join(ov(c2, c1), ov(c3, c1), c1)
+        self.assert_join(ov(c1, c2), c3, func)
+
+    def test_overloaded_with_any(self) -> None:
+        c = self.callable
+
+        def ov(*items: CallableType) -> Overloaded:
+            return Overloaded(list(items))
+
+        fx = self.fx
+        any = fx.anyt
+        self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b))
+        self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b))
+
+    def test_join_interface_types(self) -> None:
+        self.skip()  # FIX
+        self.assert_join(self.fx.f, self.fx.f, self.fx.f)
+        self.assert_join(self.fx.f, self.fx.f2, self.fx.o)
+        self.assert_join(self.fx.f, self.fx.f3, self.fx.f)
+
+    def test_join_interface_and_class_types(self) -> None:
+        self.skip()  # FIX
+
+        self.assert_join(self.fx.o, self.fx.f, self.fx.o)
+        self.assert_join(self.fx.a, self.fx.f, self.fx.o)
+
+        self.assert_join(self.fx.e, self.fx.f, self.fx.f)
+
+    def test_join_class_types_with_interface_result(self) -> None:
+        self.skip()  # FIX
+        # Unique result
+        self.assert_join(self.fx.e, self.fx.e2, self.fx.f)
+
+        # Ambiguous result
+        self.assert_join(self.fx.e2, self.fx.e3, self.fx.err)
+
+    def test_generic_interfaces(self) -> None:
+        self.skip()  # FIX
+
+        fx = InterfaceTypeFixture()
+
+        self.assert_join(fx.gfa, fx.gfa, fx.gfa)
+        self.assert_join(fx.gfa, fx.gfb, fx.o)
+
+        self.assert_join(fx.m1, fx.gfa, fx.gfa)
+
+        self.assert_join(fx.m1, fx.gfb, fx.o)
+
+    def test_simple_type_objects(self) -> None:
+        t1 = self.type_callable(self.fx.a, self.fx.a)
+        t2 = self.type_callable(self.fx.b, self.fx.b)
+
+        self.assert_join(t1, t1, t1)
+        j = join_types(t1, t1)
+        assert isinstance(j, CallableType)
+        assert_true(j.is_type_obj())
+
+        self.assert_join(t1, t2, self.fx.type_type)
+        self.assert_join(t1, self.fx.type_type, self.fx.type_type)
+        self.assert_join(self.fx.type_type, self.fx.type_type,
+                         self.fx.type_type)
+
+    def test_type_type(self) -> None:
+        self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a)
+        self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any)
+        self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type)
+        self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a)
+        self.assert_join(self.fx.type_c, self.fx.type_d, TypeType(self.fx.o))
+        self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type)
+        self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt)
+
+    # There are additional test cases in check-inference.test.
+
+    # TODO: Function types + varargs and default args.
+
+    def assert_join(self, s: Type, t: Type, join: Type) -> None:
+        self.assert_simple_join(s, t, join)
+        self.assert_simple_join(t, s, join)
+
+    def assert_simple_join(self, s: Type, t: Type, join: Type) -> None:
+        result = join_types(s, t)
+        actual = str(result)
+        expected = str(join)
+        assert_equal(actual, expected,
+                     'join({}, {}) == {{}} ({{}} expected)'.format(s, t))
+        if not isinstance(s, ErrorType) and not isinstance(result, ErrorType):
+            assert_true(is_subtype(s, result),
+                        '{} not subtype of {}'.format(s, result))
+        if not isinstance(t, ErrorType) and not isinstance(result, ErrorType):
+            assert_true(is_subtype(t, result),
+                        '{} not subtype of {}'.format(t, result))
+
+    def tuple(self, *a: Type) -> TupleType:
+        return TupleType(list(a), self.fx.std_tuple)
+
+    def callable(self, *a: Type) -> CallableType:
+        """callable(a1, ..., an, r) constructs a callable with argument types
+        a1, ... an and return type r.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
+                        a[-1], self.fx.function)
+
+    def type_callable(self, *a: Type) -> CallableType:
+        """type_callable(a1, ..., an, r) constructs a callable with
+        argument types a1, ... an and return type r, and which
+        represents a type.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n,
+                        a[-1], self.fx.type_type)
+
+
+class MeetSuite(Suite):
+    def set_up(self) -> None:
+        self.fx = TypeFixture()
+
+    def test_trivial_cases(self) -> None:
+        for simple in self.fx.void, self.fx.a, self.fx.o, self.fx.b:
+            self.assert_meet(simple, simple, simple)
+
+    def test_class_subtyping(self) -> None:
+        self.assert_meet(self.fx.a, self.fx.o, self.fx.a)
+        self.assert_meet(self.fx.a, self.fx.b, self.fx.b)
+        self.assert_meet(self.fx.b, self.fx.o, self.fx.b)
+        self.assert_meet(self.fx.a, self.fx.d, NoneTyp())
+        self.assert_meet(self.fx.b, self.fx.c, NoneTyp())
+
+    def test_tuples(self) -> None:
+        self.assert_meet(self.tuple(), self.tuple(), self.tuple())
+        self.assert_meet(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a),
+                         self.tuple(self.fx.a))
+        self.assert_meet(self.tuple(self.fx.b, self.fx.c),
+                         self.tuple(self.fx.a, self.fx.d),
+                         self.tuple(self.fx.b, NoneTyp()))
+
+        self.assert_meet(self.tuple(self.fx.a, self.fx.a),
+                         self.fx.std_tuple,
+                         NoneTyp())
+        self.assert_meet(self.tuple(self.fx.a),
+                         self.tuple(self.fx.a, self.fx.a),
+                         NoneTyp())
+
+    def test_function_types(self) -> None:
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.b))
+
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.b, self.fx.b),
+                         NoneTyp())
+        self.assert_meet(self.callable(self.fx.a, self.fx.b),
+                         self.callable(self.fx.a, self.fx.a),
+                         NoneTyp())
+
+    def test_type_vars(self) -> None:
+        self.assert_meet(self.fx.t, self.fx.t, self.fx.t)
+        self.assert_meet(self.fx.s, self.fx.s, self.fx.s)
+        self.assert_meet(self.fx.t, self.fx.s, NoneTyp())
+
+    def test_void(self) -> None:
+        self.assert_meet(self.fx.void, self.fx.void, self.fx.void)
+        self.assert_meet(self.fx.void, self.fx.anyt, self.fx.void)
+
+        # Meet of any other type against void results in ErrorType, since there
+        # is no meaningful valid result.
+        for t in [self.fx.a, self.fx.o, UnboundType('x'), NoneTyp(),
+                  self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.void, self.fx.err)
+
+    def test_none(self) -> None:
+        self.assert_meet(NoneTyp(), NoneTyp(), NoneTyp())
+
+        self.assert_meet(NoneTyp(), self.fx.anyt, NoneTyp())
+        self.assert_meet(NoneTyp(), self.fx.void, self.fx.err)
+
+        # Any type t met with None results in None, unless t is any or
+        # void.
+        for t in [self.fx.a, self.fx.o, UnboundType('x'), self.fx.t,
+                  self.tuple(), self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, NoneTyp(), NoneTyp())
+
+    def test_unbound_type(self) -> None:
+        self.assert_meet(UnboundType('x'), UnboundType('x'), self.fx.anyt)
+        self.assert_meet(UnboundType('x'), UnboundType('y'), self.fx.anyt)
+
+        self.assert_meet(UnboundType('x'), self.fx.void, self.fx.err)
+        self.assert_meet(UnboundType('x'), self.fx.anyt, UnboundType('x'))
+
+        # The meet of any type t with an unbound type results in dynamic
+        # (except for void). Unbound type means that there is an error
+        # somewhere in the program, so this does not affect type safety.
+        for t in [self.fx.a, self.fx.o, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, UnboundType('X'), self.fx.anyt)
+
+    def test_dynamic_type(self) -> None:
+        # Meet against dynamic type always results in dynamic.
+        for t in [self.fx.anyt, self.fx.a, self.fx.o, NoneTyp(),
+                  UnboundType('x'), self.fx.void, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.anyt, t)
+
+    def test_error_type(self) -> None:
+        self.assert_meet(self.fx.err, self.fx.anyt, self.fx.err)
+
+        # Meet against any type except dynamic results in ErrorType.
+        for t in [self.fx.a, self.fx.o, NoneTyp(), UnboundType('x'),
+                  self.fx.void, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.err, self.fx.err)
+
+    def test_simple_generics(self) -> None:
+        self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga)
+        self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga)
+        self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb)
+        self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet)
+        self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet)
+
+        self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet)
+        self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga)
+
+        for t in [self.fx.a, self.fx.t, self.tuple(),
+                  self.callable(self.fx.a, self.fx.b)]:
+            self.assert_meet(t, self.fx.ga, self.fx.nonet)
+
+    def test_generics_with_multiple_args(self) -> None:
+        self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab)
+        self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab)
+        self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet)
+        self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb)
+
+    def test_generics_with_inheritance(self) -> None:
+        self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab)
+        self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet)
+
+    def test_generics_with_inheritance_and_shared_supertype(self) -> None:
+        self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet)
+        self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet)
+
+    def test_generic_types_and_dynamic(self) -> None:
+        self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga)
+
+    def test_callables_with_dynamic(self) -> None:
+        self.assert_meet(self.callable(self.fx.a, self.fx.a, self.fx.anyt,
+                                       self.fx.a),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.a,
+                                       self.fx.anyt),
+                         self.callable(self.fx.a, self.fx.anyt, self.fx.anyt,
+                                       self.fx.anyt))
+
+    def test_meet_interface_types(self) -> None:
+        self.assert_meet(self.fx.f, self.fx.f, self.fx.f)
+        self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet)
+        self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3)
+
+    def test_meet_interface_and_class_types(self) -> None:
+        self.assert_meet(self.fx.o, self.fx.f, self.fx.f)
+        self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet)
+
+        self.assert_meet(self.fx.e, self.fx.f, self.fx.e)
+
+    def test_meet_class_types_with_shared_interfaces(self) -> None:
+        # These have nothing special with respect to meets, unlike joins. These
+        # are for completeness only.
+        self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet)
+        self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet)
+
+    def test_meet_with_generic_interfaces(self) -> None:
+        # TODO fix
+        self.skip()
+
+        fx = InterfaceTypeFixture()
+        self.assert_meet(fx.gfa, fx.m1, fx.m1)
+        self.assert_meet(fx.gfa, fx.gfa, fx.gfa)
+        self.assert_meet(fx.gfb, fx.m1, fx.nonet)
+
+    def test_type_type(self) -> None:
+        self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b)
+        self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet)
+        self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet)
+        self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any)
+        self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b)
+
+    # FIX generic interfaces + ranges
+
+    def assert_meet(self, s: Type, t: Type, meet: Type) -> None:
+        self.assert_simple_meet(s, t, meet)
+        self.assert_simple_meet(t, s, meet)
+
+    def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None:
+        result = meet_types(s, t)
+        actual = str(result)
+        expected = str(meet)
+        assert_equal(actual, expected,
+                     'meet({}, {}) == {{}} ({{}} expected)'.format(s, t))
+        if not isinstance(s, ErrorType) and not isinstance(result, ErrorType):
+            assert_true(is_subtype(result, s),
+                        '{} not subtype of {}'.format(result, s))
+        if not isinstance(t, ErrorType) and not isinstance(result, ErrorType):
+            assert_true(is_subtype(result, t),
+                        '{} not subtype of {}'.format(result, t))
+
+    def tuple(self, *a: Type) -> TupleType:
+        return TupleType(list(a), self.fx.std_tuple)
+
+    def callable(self, *a: Type) -> CallableType:
+        """callable(a1, ..., an, r) constructs a callable with argument types
+        a1, ... an and return type r.
+        """
+        n = len(a) - 1
+        return CallableType(list(a[:-1]),
+                            [ARG_POS] * n, [None] * n,
+                            a[-1], self.fx.function)
diff --git a/typeshed/stdlib/3.2/xml/__init__.pyi b/mypy/test/update.py
similarity index 100%
rename from typeshed/stdlib/3.2/xml/__init__.pyi
rename to mypy/test/update.py
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 18b0d57..35da22a 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -95,6 +95,8 @@ class TraverserVisitor(NodeVisitor[None]):
     def visit_assert_stmt(self, o: AssertStmt) -> None:
         if o.expr is not None:
             o.expr.accept(self)
+        if o.msg is not None:
+            o.msg.accept(self)
 
     def visit_del_stmt(self, o: DelStmt) -> None:
         if o.expr is not None:
diff --git a/mypy/treetransform.py b/mypy/treetransform.py
index 1384d13..e6e4678 100644
--- a/mypy/treetransform.py
+++ b/mypy/treetransform.py
@@ -246,13 +246,14 @@ class TransformVisitor(NodeVisitor[Node]):
         return ForStmt(self.expr(node.index),
                        self.expr(node.expr),
                        self.block(node.body),
-                       self.optional_block(node.else_body))
+                       self.optional_block(node.else_body),
+                       self.optional_type(node.index_type))
 
     def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt:
         return ReturnStmt(self.optional_expr(node.expr))
 
     def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt:
-        return AssertStmt(self.expr(node.expr))
+        return AssertStmt(self.expr(node.expr), self.optional_expr(node.msg))
 
     def visit_del_stmt(self, node: DelStmt) -> DelStmt:
         return DelStmt(self.expr(node.expr))
@@ -286,7 +287,8 @@ class TransformVisitor(NodeVisitor[Node]):
     def visit_with_stmt(self, node: WithStmt) -> WithStmt:
         return WithStmt(self.expressions(node.expr),
                         self.optional_expressions(node.target),
-                        self.block(node.body))
+                        self.block(node.body),
+                        self.optional_type(node.target_type))
 
     def visit_print_stmt(self, node: PrintStmt) -> PrintStmt:
         return PrintStmt(self.expressions(node.args),
diff --git a/mypy/typefixture.py b/mypy/typefixture.py
index a5fac17..8afcfac 100644
--- a/mypy/typefixture.py
+++ b/mypy/typefixture.py
@@ -155,38 +155,38 @@ class TypeFixture:
 
     # Helper methods
 
-    def callable(self, *a):
+    def callable(self, *a: Type) -> CallableType:
         """callable(a1, ..., an, r) constructs a callable with argument types
         a1, ... an and return type r.
         """
-        return CallableType(a[:-1], [ARG_POS] * (len(a) - 1),
+        return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1),
                         [None] * (len(a) - 1), a[-1], self.function)
 
-    def callable_type(self, *a):
+    def callable_type(self, *a: Type) -> CallableType:
         """callable_type(a1, ..., an, r) constructs a callable with
         argument types a1, ... an and return type r, and which
         represents a type.
         """
-        return CallableType(a[:-1], [ARG_POS] * (len(a) - 1),
+        return CallableType(list(a[:-1]), [ARG_POS] * (len(a) - 1),
                         [None] * (len(a) - 1), a[-1], self.type_type)
 
-    def callable_default(self, min_args, *a):
+    def callable_default(self, min_args: int, *a: Type) -> CallableType:
         """callable_default(min_args, a1, ..., an, r) constructs a
         callable with argument types a1, ... an and return type r,
         with min_args mandatory fixed arguments.
         """
         n = len(a) - 1
-        return CallableType(a[:-1],
+        return CallableType(list(a[:-1]),
                             [ARG_POS] * min_args + [ARG_OPT] * (n - min_args),
                             [None] * n,
                             a[-1], self.function)
 
-    def callable_var_arg(self, min_args, *a):
+    def callable_var_arg(self, min_args: int, *a: Type) -> CallableType:
         """callable_var_arg(min_args, a1, ..., an, r) constructs a callable
         with argument types a1, ... *an and return type r.
         """
         n = len(a) - 1
-        return CallableType(a[:-1],
+        return CallableType(list(a[:-1]),
                             [ARG_POS] * min_args +
                             [ARG_OPT] * (n - 1 - min_args) +
                             [ARG_STAR], [None] * n,
@@ -241,7 +241,7 @@ class InterfaceTypeFixture(TypeFixture):
     """Extension of TypeFixture that contains additional generic
     interface types."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         # GF[T]
         self.gfi = self.make_type_info('GF', typevars=['T'], is_abstract=True)
diff --git a/mypy/types.py b/mypy/types.py
index 32d7c83..3d27d52 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1034,13 +1034,18 @@ class UnionType(Type):
             return AnyType()
 
         from mypy.subtypes import is_subtype
+        from mypy.sametypes import is_same_type
+
         removed = set()  # type: Set[int]
         for i, ti in enumerate(items):
             if i in removed: continue
             # Keep track of the truishness info for deleted subtypes which can be relevant
             cbt = cbf = False
             for j, tj in enumerate(items):
-                if i != j and is_subtype(tj, ti):
+                if (i != j
+                    and is_subtype(tj, ti)
+                    and (not (isinstance(tj, Instance) and tj.type.fallback_to_any)
+                         or is_same_type(ti, tj))):
                     removed.add(j)
                     cbt = cbt or tj.can_be_true
                     cbf = cbf or tj.can_be_false
diff --git a/mypy/typevars.py b/mypy/typevars.py
new file mode 100644
index 0000000..1bdb104
--- /dev/null
+++ b/mypy/typevars.py
@@ -0,0 +1,24 @@
+from typing import Union
+
+from mypy.nodes import TypeInfo
+
+from mypy.erasetype import erase_typevars
+from mypy.sametypes import is_same_type
+from mypy.types import Instance, TypeVarType, TupleType, Type
+
+
+def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]:
+    """For a non-generic type, return instance type representing the type.
+    For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn].
+    """
+    tv = []  # type: List[Type]
+    for i in range(len(typ.type_vars)):
+        tv.append(TypeVarType(typ.defn.type_vars[i]))
+    inst = Instance(typ, tv)
+    if typ.tuple_type is None:
+        return inst
+    return typ.tuple_type.copy_modified(fallback=inst)
+
+
+def has_no_typevars(typ: Type) -> bool:
+    return is_same_type(typ, erase_typevars(typ))
diff --git a/mypy/version.py b/mypy/version.py
index e6a889f..8d8eed8 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -1,7 +1,7 @@
 import os
 from mypy import git
 
-__version__ = '0.470'
+__version__ = '0.480-dev'
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
diff --git a/mypy/visitor.py b/mypy/visitor.py
index 33f287b..df04ebd 100644
--- a/mypy/visitor.py
+++ b/mypy/visitor.py
@@ -1,5 +1,6 @@
 """Generic abstract syntax tree node visitor"""
 
+from abc import abstractmethod
 from typing import TypeVar, Generic
 
 if False:
@@ -10,7 +11,173 @@ if False:
 T = TypeVar('T')
 
 
-class NodeVisitor(Generic[T]):
+class ExpressionVisitor(Generic[T]):
+    @abstractmethod
+    def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_str_expr(self, o: 'mypy.nodes.StrExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_bytes_expr(self, o: 'mypy.nodes.BytesExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_unicode_expr(self, o: 'mypy.nodes.UnicodeExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_float_expr(self, o: 'mypy.nodes.FloatExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_complex_expr(self, o: 'mypy.nodes.ComplexExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_ellipsis(self, o: 'mypy.nodes.EllipsisExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_comparison_expr(self, o: 'mypy.nodes.ComparisonExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_reveal_type_expr(self, o: 'mypy.nodes.RevealTypeExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T:
+        pass
+
+    @abstractmethod
+    def visit_func_expr(self, o: 'mypy.nodes.FuncExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T:
+        pass
+
+    @abstractmethod
+    def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T:
+        pass
+
+    @abstractmethod
+    def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T:
+        pass
+
+    @abstractmethod
+    def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T:
+        pass
+
+    @abstractmethod
+    def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T:
+        pass
+
+
+class NodeVisitor(Generic[T], ExpressionVisitor[T]):
     """Empty base class for parse tree node visitors.
 
     The T type argument specifies the return type of the visit
@@ -115,7 +282,7 @@ class NodeVisitor(Generic[T]):
     def visit_exec_stmt(self, o: 'mypy.nodes.ExecStmt') -> T:
         pass
 
-    # Expressions
+    # Expressions (default no-op implementation)
 
     def visit_int_expr(self, o: 'mypy.nodes.IntExpr') -> T:
         pass
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
new file mode 100644
index 0000000..de99649
--- /dev/null
+++ b/mypy_self_check.ini
@@ -0,0 +1,8 @@
+[mypy]
+disallow_untyped_defs = True
+disallow_subclassing_any = True
+warn_no_return = True
+
+; historical exceptions
+[mypy-mypy.test.testextensions]
+disallow_untyped_defs = False
diff --git a/mypy_strict_optional.ini b/mypy_strict_optional.ini
new file mode 100644
index 0000000..3b7d272
--- /dev/null
+++ b/mypy_strict_optional.ini
@@ -0,0 +1,5 @@
+; Mypy is run both with and without this config file in CI.
+; This allows us to make mypy strict Optional compliant over time.
+[mypy]
+strict_optional = True
+ignore_errors = True
diff --git a/pinfer/.gitignore b/pinfer/.gitignore
new file mode 100644
index 0000000..e1dace5
--- /dev/null
+++ b/pinfer/.gitignore
@@ -0,0 +1,3 @@
+__pycache__
+*~
+*.pyc
diff --git a/pinfer/LICENSE b/pinfer/LICENSE
new file mode 100644
index 0000000..ecdce98
--- /dev/null
+++ b/pinfer/LICENSE
@@ -0,0 +1,27 @@
+pinfer is licensed under the terms of the MIT license, reproduced below.
+
+= = = = =
+
+The MIT License
+
+Copyright (c) 2013, 2014 Jukka Lehtosalo
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+= = = = =
diff --git a/pinfer/README b/pinfer/README
new file mode 100644
index 0000000..1f4fe4c
--- /dev/null
+++ b/pinfer/README
@@ -0,0 +1,47 @@
+ReadMe for pinfer
+=================
+
+Introduction
+------------
+
+Pinfer is a tool for runtime type inference of variable types and
+function signatures in Python programs. The inferred types are mostly
+compatible with mypy types. It is intended for coming up with draft
+types when migrating Python code to static typing, but it can also be
+used as a code understanding or analysis tool.
+
+Pinfer is very experimental!
+
+Requirements
+------------
+
+ * Python 3.2 or later
+
+Basic usage
+-----------
+
+To infer types of all functions and methods in a module:
+
+  import foo   # target module
+  import pinfer
+
+  # set up type inference and dumping
+  pinfer.infer_module(foo)
+  pinfer.dump_at_exit()
+
+  # now do something with the module, e.g. run tests
+
+For inferring a Python module, add the above lines to the test suite.
+
+Handy wrapper
+-------------
+
+The p.py script provides a handy wrapper for the above.
+
+
+Copyright
+-------------
+
+This project includes files from the open source CPython project.  Those files are Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved.  The license can be found at https://github.com/python/cpython/blob/master/LICENSE.
+
diff --git a/typeshed/stdlib/3.2/xml/etree/__init__.pyi b/pinfer/__init__.py
similarity index 100%
rename from typeshed/stdlib/3.2/xml/etree/__init__.pyi
rename to pinfer/__init__.py
diff --git a/pinfer/inspect3.py b/pinfer/inspect3.py
new file mode 100644
index 0000000..4d74be1
--- /dev/null
+++ b/pinfer/inspect3.py
@@ -0,0 +1,122 @@
+# from Python 3's inspect.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+'''
+provide getfullargspec() and getcallargs() for Python 2
+'''
+
+import sys
+import inspect
+
if sys.version_info.major == 2:
    # Python 2's inspect module lacks getfullargspec()/getcallargs() with
    # keyword-only support, so provide shims adapted from CPython 3's
    # inspect.py (see the copyright header above).

    def getfullargspec(func):
        # Adapt the Python 2 4-tuple argspec to the 7-tuple shape of
        # Python 3's getfullargspec: py2 has no kwonlyargs, kwonlydefaults
        # or annotations, so those slots are empty.
        (args, varargs, keywords, defaults) = inspect.getargspec(func)
        return (args, varargs, keywords, defaults, [], [], {})


    def getcallargs(*func_and_positional, **named):
        """Get the mapping of arguments to values.

        A dict is returned, with keys the function argument names (including the
        names of the * and ** arguments, if any), and values the respective bound
        values from 'positional' and 'named'."""
        # The target function and its positionals arrive packed in *args so
        # that names like 'func' can still be passed through **named.
        func = func_and_positional[0]
        positional = func_and_positional[1:]
        spec = getfullargspec(func)
        args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
        f_name = func.__name__
        arg2value = {}


        if inspect.ismethod(func) and func.__self__ is not None:
            # implicit 'self' (or 'cls' for classmethods) argument
            positional = (func.__self__,) + positional
        num_pos = len(positional)
        num_args = len(args)
        num_defaults = len(defaults) if defaults else 0

        # Bind positionals to named parameters; overflow goes to *varargs.
        n = min(num_pos, num_args)
        for i in range(n):
            arg2value[args[i]] = positional[i]
        if varargs:
            arg2value[varargs] = tuple(positional[n:])
        possible_kwargs = set(args + kwonlyargs)
        if varkw:
            arg2value[varkw] = {}
        for kw, value in named.items():
            if kw not in possible_kwargs:
                if not varkw:
                    raise TypeError("%s() got an unexpected keyword argument %r" %
                                    (f_name, kw))
                arg2value[varkw][kw] = value
                continue
            if kw in arg2value:
                raise TypeError("%s() got multiple values for argument %r" %
                                (f_name, kw))
            arg2value[kw] = value
        if num_pos > num_args and not varargs:
            _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
                       num_pos, arg2value)
        if num_pos < num_args:
            # Fill remaining parameters from defaults; complain about any
            # required (non-defaulted) parameter still unbound.
            req = args[:num_args - num_defaults]
            for arg in req:
                if arg not in arg2value:
                    _missing_arguments(f_name, req, True, arg2value)
            for i, arg in enumerate(args[num_args - num_defaults:]):
                if arg not in arg2value:
                    arg2value[arg] = defaults[i]
        missing = 0
        for kwarg in kwonlyargs:
            if kwarg not in arg2value:
                if kwonlydefaults and kwarg in kwonlydefaults:
                    arg2value[kwarg] = kwonlydefaults[kwarg]
                else:
                    missing += 1
        if missing:
            _missing_arguments(f_name, kwonlyargs, False, arg2value)
        return arg2value


    def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
        # Raise a CPython-style "takes N positional arguments but M were
        # given" TypeError with correct pluralization.
        atleast = len(args) - defcount
        kwonly_given = len([arg for arg in kwonly if arg in values])
        if varargs:
            plural = atleast != 1
            sig = "at least %d" % (atleast,)
        elif defcount:
            plural = True
            sig = "from %d to %d" % (atleast, len(args))
        else:
            plural = len(args) != 1
            sig = str(len(args))
        kwonly_sig = ""
        if kwonly_given:
            msg = " positional argument%s (and %d keyword-only argument%s)"
            kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
                                 "s" if kwonly_given != 1 else ""))
        raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
                (f_name, sig, "s" if plural else "", given, kwonly_sig,
                 "was" if given == 1 and not kwonly_given else "were"))


    def _missing_arguments(f_name, argnames, pos, values):
        # Raise a CPython-style "missing N required ... argument(s)" TypeError,
        # listing the unbound names in natural-language form ("'a', 'b' and 'c'").
        names = [repr(name) for name in argnames if name not in values]
        missing = len(names)
        if missing == 1:
            s = names[0]
        elif missing == 2:
            s = "{} and {}".format(*names)
        else:
            tail = ", {} and {}".format(*names[-2:])
            del names[-2:]
            s = ", ".join(names) + tail
        raise TypeError("%s() missing %i required %s argument%s: %s" %
                        (f_name, missing,
                          "positional" if pos else "keyword-only",
                          "" if missing == 1 else "s", s))


else:
    # Python 3 has the real implementations; re-export them unchanged.
    getfullargspec = inspect.getfullargspec
    getcallargs = inspect.getcallargs
diff --git a/pinfer/p.py b/pinfer/p.py
new file mode 100644
index 0000000..451038d
--- /dev/null
+++ b/pinfer/p.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+"""Stub to run pinfer on a module.
+
+Usage:
+
+  p.py targetmod testfile [outfile] [ -- testargs]
+
+Where:
+
+  targetmod:  the full target module (e.g. textwrap)
+  testfile: the full test module file (e.g. test/test_textwrap.py)
+  outfile:  where to write the annotated module.  If unspecified, will
+            write stubs at end of stdout.
+
+Example invocation:
+
+  python3 p.py textwrap test/test_textwrap.py
+"""
+
+
+import sys
+import imp
+import pinfer
+import os
+import atexit
+import inspect
+
+iport = __builtins__.__import__
+watched = set()
+
+
def inferring_import(*args, **kwargs):
    """Import hook: delegate to the saved real __import__, then start type
    inference on each module the first time it is seen.

    NOTE(review): nothing visible in this file installs this hook over
    __builtins__.__import__ -- confirm it is wired up by a caller.
    """
    mod = iport(*args, **kwargs)
    if mod in watched:
        return mod
    watched.add(mod)
    pinfer.infer_module(mod)
    return mod
+
+
def main():
    """Parse argv, wrap the target module for inference, then run the test
    file as __main__ so the wrapped functions get exercised.

    Usage: p.py targetmod testfile [outfile] [ -- testargs]
    Everything after '--' is left in sys.argv for the test file to consume.
    """
    # Split our own arguments from the ones destined for the test script.
    if '--' in sys.argv:
        argslen = sys.argv.index('--')
    else:
        argslen = len(sys.argv)
    args = sys.argv[1:argslen]
    del sys.argv[1:argslen + 1]

    if len(args) == 2:
        targetpackage, testfile = args
        outfile = None
    elif len(args) == 3:
        targetpackage, testfile, outfile = args
    else:
        sys.stderr.write('Usage: %s targetmodule testfile [outfile] [ -- testargs]\n' %
                         sys.argv[0])
        sys.exit(2)

    # help us with local imports
    filemodule = os.path.dirname(os.path.abspath(testfile))
    sys.path.append(filemodule)

    targetmod = __import__(targetpackage)
    targetfile = inspect.getfile(targetmod)
    pinfer.infer_module(targetmod)

    if outfile:
        # Defaults are used to freeze current values; atexit runs this after
        # the test file (and its own atexit-registered dumps) have finished.
        @atexit.register
        def rewrite_file(targetfile=targetfile, outfile=outfile, pinfer=pinfer):
            if targetfile.endswith(".pyc"):
                targetfile = targetfile[0:-1]
            annotated = pinfer.annotate_file(targetfile)
            open(outfile, "w").write(annotated)
    else:
        pinfer.dump_at_exit()

    # Don't infer types for values defined by the test driver itself.
    pinfer.ignore_files.add(os.path.abspath(testfile))

    # run testfile as main
    # NOTE(review): 'imp' is deprecated on Python 3; load_source re-executes
    # the test file under the __main__ module name.
    del sys.modules['__main__']
    imp.load_source('__main__', testfile)

if __name__ == '__main__':
    main()
diff --git a/pinfer/pinfer.py b/pinfer/pinfer.py
new file mode 100644
index 0000000..3dd1445
--- /dev/null
+++ b/pinfer/pinfer.py
@@ -0,0 +1,686 @@
+"""Tools for runtime type inference"""
+
+import inspect
+from inspect3 import getfullargspec, getcallargs
+import types
+import codecs
+import os
+import tokenize
+try:
+    from StringIO import StringIO
+    from unparse import Unparser
+except:
+    from io import StringIO
+    from unparse3 import Unparser
+import ast
+
+
+MAX_INFERRED_TUPLE_LENGTH = 10
+PREFERRED_LINE_LENGTH = 79
+
+
+var_db = {}  # (location, variable) -> type
+func_argid_db = {}  # funcid -> argspec
+func_arg_db = {}  # (funcid, name) -> type
+func_return_db = {}  # funcname -> type
+func_source_db = {}  # funcid -> source string
+#func_info_db = {}  # funcid -> (class, name, argspec, file, line, source)
+ignore_files = set()
+
+# The type inferencing wrapper should not be reentrant.  It's not, in theory, calling
+# out to any external code which we would want to infer the types of.  However,
+# sometimes we do something like infer_type(arg.keys()) or infer_type(arg.values()) if
+# the arg is a collection, and we want to know about the types of its elements.  .keys(),
+# .values(), etc. can be overloaded, possibly to a method we've wrapped.  This can become
+# infinitely recursive, particularly because on something like arg.keys(), keys() gets passed
+# arg as the first parameter, so if we've wrapped keys() we'll try to infer_type(arg),
+# which will detect it's a dictionary, call infer_type(arg.keys()), recurse and so on.
+# We ran into this problem with collections.OrderedDict.
+# To prevent reentrancy, we set is_performing_inference = True iff we're in the middle of
+# inferring the types of a function.  If we try to run another function we've wrapped,
+# we skip type inferencing so we can't accidentally infinitely recurse.
+is_performing_inference = False
+
+
def reset():
    """Clear all collected inference state.

    func_source_db and func_argid_db are deliberately left intact: already
    wrapped functions keep referencing them, so clearing them would lose
    those functions for good.
    """
    global var_db, func_argid_db, func_arg_db, func_return_db, func_source_db
    global ignore_files, is_performing_inference
    var_db = {}
    func_arg_db = {}
    func_return_db = {}
    ignore_files = set()
    is_performing_inference = False
+
+
def format_state(pretty=False):
    """Render every inferred variable and function type as a report string."""
    out = []
    # Variables first, sorted by (location, name).
    for loc, name in sorted(var_db.keys()):
        out.append('%s: %s' % (name, var_db[(loc, name)]))
    # Then function signatures, grouped under their class header (if any).
    current_class = ''
    indent = ''
    for funcid in sorted(set(func_return_db.keys())):
        cls, fname, _sourcefile, _sourceline = funcid
        if cls != current_class:
            if cls:
                out.append('class %s(...):' % cls)
                indent = ' ' * 4
            else:
                indent = ''
            current_class = cls

        out.append(format_sig(funcid, fname, indent, pretty))
    return '\n'.join(out)
+
+
def unparse_ast(node):
    """Turn an AST node back into (whitespace-stripped) source text."""
    out = StringIO()
    Unparser(node, out)
    return out.getvalue().strip()
+
+
def format_sig(funcid, fname, indent, pretty, defaults=[]):
    """Format one inferred signature as a 'def name(args) -> ret' line.

    funcid indexes func_argid_db / func_arg_db / func_return_db; 'indent' is
    the leading indentation (e.g. four spaces inside a class).  With
    pretty=True, signatures longer than PREFERRED_LINE_LENGTH are wrapped
    over multiple lines aligned under the opening parenthesis.

    NOTE(review): the 'defaults' parameter is always overwritten below and is
    kept only for backward compatibility with existing callers.
    """
    (argnames, varargs, varkw, _, kwonlyargs, _, _) = func_argid_db[funcid]

    # to get defaults, parse the function, get the nodes for the
    # defaults, then unparse them
    try:
        fn_ast = ast.parse(func_source_db[funcid].strip()).body[0]

        # override fname if we parsed a different one
        fname = fn_ast.name

        defaults = [unparse_ast(dn) for dn in fn_ast.args.defaults]

        if hasattr(fn_ast.args, 'kw_defaults'):
            kwonly_defaults = [unparse_ast(dn) for dn in fn_ast.args.kw_defaults]
        else:
            kwonly_defaults = []
    except Exception:
        # Source may be unparseable (decorated/synthesized code); fall back
        # to a signature without default values.  (Was a bare 'except:'.)
        defaults, kwonly_defaults = [], []
    finally:
        # pad defaults to match the length of args
        defaults = ([None] * (len(argnames) - len(defaults))) + defaults
        kwonly_defaults = ([None] * (len(kwonlyargs) - len(kwonly_defaults))) + kwonly_defaults

    args = [('', arg, default) for (arg, default) in zip(argnames, defaults)]

    if varargs:
        args += [('*', varargs, None)]
    elif len(kwonlyargs) > 0:
        # Keyword-only args with no *varargs need a bare '*' separator.
        args += [('*', '', None)]
    if len(kwonlyargs) > 0:
        args += [('', arg, default) for (arg, default) in zip(kwonlyargs, kwonly_defaults)]
    if varkw:
        args += [('**', varkw, None)]

    argstrs = []
    for i, (prefix, arg, default) in enumerate(args):
        argstr = prefix + arg

        # Omit type of self argument.
        if (funcid, arg) in func_arg_db and not (i == 0 and arg == 'self'):
            argstr += ': %s' % func_arg_db[(funcid, arg)]

        if default:
            argstr += ' = %s' % default

        argstrs.append(argstr)

    ret = str(func_return_db.get(funcid, Unknown()))

    sig = 'def %s(%s) -> %s' % (fname, ', '.join(argstrs), ret)
    if not pretty or len(sig) <= PREFERRED_LINE_LENGTH or not args:
        return indent + sig

    else:
        # Format into multiple lines to conserve horizontal space.
        first = indent + 'def %s(' % fname
        # extra_indent is the column of '(' and already includes 'indent',
        # since first.index counts the leading indentation characters.
        extra_indent = first.index('(') + 1

        # BUG FIX: the original computed 'decl = indent + first' (and added
        # 'indent' again on the continuation and return-type lines), which
        # double-indented methods.  'first' already carries the indent.
        decl = first
        decl += (',\n' + ' ' * extra_indent).join(argstrs)
        decl += ')\n%s -> %s' % (' ' * (extra_indent - 4), ret)
        return decl
+
+
def annotate_file(path):
    """Return the source of *path* with inferred signatures spliced in.

    Each function whose funcid points at this file has its 'def ... :'
    header replaced by the pretty-printed inferred signature; a typing
    import is prepended so the annotations resolve.
    """
    # 5 == token.INDENT (tokenize re-exports the token module's constants).
    INDENT_TOKEN = 5

    with open(path, 'r') as targetfile:
        source = targetfile.read()

    # Byte offset of the start of each line, for line->offset conversion.
    line_offsets = []
    source_length = 0
    for line in source.split('\n'):
        line_offsets.append(source_length)
        source_length = source_length + len(line) + 1

    funcids = set(funcid for funcid, arg in func_arg_db)

    # list of (oldstart, oldend, replacement)
    replacements = []  # type: List[Tuple[int, int, str]]

    for funcid in funcids:
        class_name, name, sourcefile, def_start_line = funcid
        if sourcefile != path:
            continue

        func_source = func_source_db[funcid]
        tokens = list(tokenize.generate_tokens(StringIO(func_source).readline))
        assert len(tokens) > 0

        # we're making the assumption that the def at least gets to start on
        # its own line, which is fine for non-lambdas

        if tokens[0][0] == INDENT_TOKEN:
            indent = tokens[0][1]
            del tokens[0]
        else:
            indent = ''

        # Find the first indent, which should be between the end of the def
        # and before the start of the body.  Then find the preceding colon,
        # which should be at the end of the def.

        for indent_loc in range(len(tokens)):
            if tokens[indent_loc][0] == INDENT_TOKEN:
                function_is_one_line = False
                break
            else:
                function_is_one_line = True

        if function_is_one_line:
            # we're also making the assumption that the def has an indent on the
            # line following the signature, which is true almost all of the time.
            # If this is not the case, we should just leave a comment above the
            # function, although I might not have time to do that now.
            continue

        for def_end_loc in range(indent_loc, -1, -1):
            if tokens[def_end_loc][1] == ':':
                break

        assert def_end_loc > 0

        # Token positions are relative to the function source; shift them to
        # absolute file coordinates.
        def_end_line, def_end_col = tokens[def_end_loc][2]
        def_end_line -= 1  # the tokenizer apparently 1-indexes lines
        def_end_line += def_start_line

        def_start_offset = line_offsets[def_start_line]
        def_end_offset = line_offsets[def_end_line] + def_end_col

        annotated_def = format_sig(funcid, name, indent, True)

        replacements.append((def_start_offset, def_end_offset, annotated_def))

    # ideally, we'd put this after the docstring
    replacements.append((0, 0, "from typing import List, Dict, Set, Tuple, Callable, Pattern, Match, Union, Optional\n"))

    # absurdly inefficient algorithm: replace with O(n) writer
    # Applying replacements back-to-front keeps earlier offsets valid.

    for (start, end, replacement) in sorted(replacements, key=lambda r: r[0], reverse=True):
        source = source[0:start] + replacement + source[end:]

    return source
+
+
def dump():
    """Print the inferred types collected so far, then reset all state."""
    report = format_state(pretty=True)
    if report:
        print()
        print('INFERRED TYPES:')
        print(report)
    reset()
+
+
def dump_at_exit():
    """Arrange for dump() to run when the interpreter exits."""
    import atexit
    atexit.register(dump)
+
+
def get_defining_file(obj):
    """Return the absolute path of the source file defining *obj*.

    Returns None when no file can be determined (builtins, C extensions).
    A compiled '.pyc' path is mapped back to its '.py' source.
    """
    try:
        path = os.path.abspath(inspect.getfile(obj))
        if path.endswith('.pyc'):
            path = path[:-1]
        return path
    except Exception:
        # inspect.getfile raises TypeError for built-in objects.  Keep this
        # best-effort, but don't swallow KeyboardInterrupt/SystemExit the
        # way the original bare 'except:' did.
        return None
+
+
def infer_var(name, value):
    """Record one observation of a (module-level) variable's value type."""
    update_var_db((None, name), value)
+
+
def infer_attrs(x):
    """Record types for the data attributes of object *x*.

    Scans both the instance __dict__ and the class __dict__, skipping
    dunder bookkeeping attributes and plain functions found on the class
    (i.e. methods).  Entries are keyed as 'ClassName.attr'.
    """
    if hasattr(x, '__class__'):
        t = x.__class__
    else:
        t = type(x)  # fallback for objects without __class__
    cls = t.__name__
    typedict = t.__dict__
    # Fixed: the loop variable was named 'dict', shadowing the builtin.
    for namespace in (x.__dict__, typedict):
        for attr, value in namespace.items():
            if attr in ('__dict__', '__doc__', '__module__', '__weakref__'):
                continue
            if type(value) is type(infer_attrs) and namespace is typedict:
                # Skip methods.
                continue
            key = (None, '%s.%s' % (cls, attr))
            update_var_db(key, value)
+
+
def infer_method_signature(class_name):
    """Decorator factory: infer signatures of methods belonging to
    class *class_name*."""
    return lambda func: infer_signature(func, class_name)
+
+
def infer_signature(func, class_name=''):
    """Decorator that infers the signature of a function.

    Returns *func* unchanged when it cannot be analyzed (no retrievable
    source, unsupported argspec) or is already wrapped; otherwise returns a
    wrapper that records argument and return types on every call.
    """

    # infer_method_signature should be idempotent
    if hasattr(func, '__is_inferring_sig'):
        return func

    # Never wrap pinfer's own functions (would recurse into ourselves).
    assert func.__module__ != infer_method_signature.__module__

    try:
        funcfile = get_defining_file(func)
        funcsource, sourceline = inspect.getsourcelines(func)
        sourceline -= 1  # getsourcelines is apparently 1-indexed
    except:
        return func

    funcid = (class_name, func.__name__, funcfile, sourceline)
    func_source_db[funcid] = ''.join(funcsource)

    try:
        func_argid_db[funcid] = getfullargspec(func)
        vargs_name, kwargs_name = func_argid_db[funcid][1], func_argid_db[funcid][2]
    except TypeError:
        # Not supported.
        return func

    def wrapper(*args, **kwargs):
        global is_performing_inference
        # If we're already doing inference, we should be in our own code, not code we're checking.
        # Not doing this check sometimes results in infinite recursion.

        if is_performing_inference:
            return func(*args, **kwargs)

        expecting_type_error, got_type_error, got_exception = False, False, False

        # Phase 1: bind the arguments and infer their types, with the
        # reentrancy guard held so nested wrapped calls pass through.
        is_performing_inference = True
        try:
            callargs = getcallargs(func, *args, **kwargs)

            # we have to handle *args and **kwargs separately
            if vargs_name:
                va = callargs.pop(vargs_name)
            if kwargs_name:
                kw = callargs.pop(kwargs_name)

            arg_db = {arg: infer_value_type(value) for arg, value in callargs.items()}

            # *args and **kwargs need to merge the types of all their values
            if vargs_name:
                arg_db[vargs_name] = union_many_types(*[infer_value_type(v) for v in va])
            if kwargs_name:
                arg_db[kwargs_name] = union_many_types(*[infer_value_type(v) for v in kw.values()])

        except TypeError:
            # If binding failed, the real call below should fail the same way.
            got_exception = expecting_type_error = True
        except:
            got_exception = True
        finally:
            is_performing_inference = False

        # Phase 2: run the real function; only record results when neither
        # phase raised.
        try:
            ret = func(*args, **kwargs)
        except TypeError:
            got_type_error = got_exception = True
            raise
        except:
            got_exception = True
            raise
        finally:
            if not got_exception:
                assert not expecting_type_error

                # if we didn't get a TypeError, update the actual database
                for arg, t in arg_db.items():
                    update_db(func_arg_db, (funcid, arg), t)

                # if we got an exception, we don't have a ret
                # NOTE(review): this inner test is redundant -- got_exception
                # cannot change between the outer check and here.
                if not got_exception:
                    is_performing_inference = True
                    try:
                        type = infer_value_type(ret)
                        update_db(func_return_db, funcid, type)
                    except:
                        pass
                    finally:
                        is_performing_inference = False

        return ret

    if hasattr(func, '__name__'):
        wrapper.__name__ = func.__name__
    wrapper.__is_inferring_sig = True
    return wrapper
+
+
def infer_class(cls):
    """Class decorator for inferring signatures of all methods of the class."""
    function_type = type(infer_class)  # plain function objects only
    for name, member in cls.__dict__.items():
        if type(member) is function_type:
            setattr(cls, name, infer_method_signature(cls.__name__)(member))
    return cls
+
+
def infer_module(namespace):
    """Wrap every function and class in a module (or plain dict) so their
    signatures are inferred as they run."""
    if hasattr(namespace, '__dict__'):
        namespace = namespace.__dict__
    # list() because we replace entries while iterating.
    for name, obj in list(namespace.items()):
        if inspect.isclass(obj):
            namespace[name] = infer_class(obj)
        elif inspect.isfunction(obj):
            namespace[name] = infer_signature(obj)
+
+
def update_var_db(key, value):
    """Fold one observed *value* into the variable-type database at *key*."""
    # Fixed: the local was named 'type', shadowing the builtin.
    update_db(var_db, key, infer_value_type(value))
+
+
def update_db(db, key, type):
    """Store *type* under *key*, unioning it with any previously seen type."""
    if key in db:
        db[key] = combine_types(db[key], type)
    else:
        db[key] = type
+
+
def merge_db(db, other):
    """Merge *other* into *db* in place, unioning types on key collisions."""
    assert id(db) != id(other)
    for key, val in other.items():
        if key in db:
            db[key] = combine_types(db[key], val)
        else:
            db[key] = val
+
+
def infer_value_type(value, depth=0):
    """Map a runtime *value* to an inferred type object.

    Collections recurse into their elements (bounded by *depth*); anything
    else becomes an Instance of the first usable class in its MRO.
    """
    # Prevent infinite recursion
    if depth > 5:
        return Unknown()
    depth += 1

    if value is None:
        # None is kept literally; Union.__str__ renders it as Optional[...].
        return None
    elif isinstance(value, list):
        return Generic('List', [infer_value_types(value, depth)])
    elif isinstance(value, dict):
        keytype = infer_value_types(value.keys(), depth)
        valuetype = infer_value_types(value.values(), depth)
        return Generic('Dict', (keytype, valuetype))
    elif isinstance(value, tuple):
        if len(value) <= MAX_INFERRED_TUPLE_LENGTH:
            # Short tuples get a per-position (heterogeneous) type.
            return Tuple(infer_value_type(item, depth)
                         for item in value)
        else:
            # Long tuples are treated as homogeneous sequences.
            return Generic('TupleSequence', [infer_value_types(value, depth)])
    elif isinstance(value, set):
        return Generic('Set', [infer_value_types(value, depth)])
    elif isinstance(value, types.MethodType) or isinstance(value, types.FunctionType):
        return Instance(Callable)
    else:
        # Walk the MRO so classes defined in ignored files (e.g. the test
        # driver) are skipped in favour of a visible base class.
        for t in type(value).mro():
            if get_defining_file(t) in ignore_files:
                continue
            elif t is object:
                return Any()
            elif hasattr(types, 'InstanceType') and t is types.InstanceType:
                # Python 2 old-style instance.
                return Any()
            else:
                return Instance(t)
        else:
            # for/else: every class in the MRO lives in an ignored file.
            return Any()
+
+
def infer_value_types(values, depth=0):
    """Infer a single type for an iterable of values.

    >>> infer_value_types((1, 'x'))
    Union(int, str)
    >>> infer_value_types([])
    Unknown
    """
    inferred = Unknown()
    for value in sample(values):
        # Fixed: the loop temporary was named 'type', shadowing the builtin.
        inferred = combine_types(inferred, infer_value_type(value, depth))
    return inferred
+
+
def sample(values):
    """Return the values to consider for inference, as a list.

    TODO: subsample large collections instead of materializing everything.
    """
    return [*values]
+
+
def union_many_types(*types):
    """Union an arbitrary number of types into a single type."""
    result = Unknown()
    for t in types:
        result = combine_types(result, t)
    return result
+
+
def combine_types(x, y):
    """Perform a union of two types.

    >>> combine_types(Instance(int), None)
    Optional[int]
    """
    # Unknown is the identity element of the union.
    if isinstance(x, Unknown):
        return y
    if isinstance(y, Unknown):
        return x
    # Any absorbs everything.
    if isinstance(x, Any):
        return x
    if isinstance(y, Any):
        return y
    # Existing unions are flattened/merged rather than nested.
    if isinstance(x, Union):
        return combine_either(x, y)
    if isinstance(y, Union):
        return combine_either(y, x)
    if x == y:
        return x
    return simplify_either([x], [y])
+
+
def combine_either(either, x):
    """Fold type *x* (possibly itself a Union) into the Union *either*."""
    other_types = x.types if isinstance(x, Union) else [x]
    return simplify_either(either.types, other_types)
+
+
def simplify_either(x, y):
    """Union two lists of types, merging compatible members.

    NOTE(review): the loop variable 'type' shadows the builtin.
    """
    numerics = [Instance(int), Instance(float), Instance(complex)]

    # TODO this is O(n**2); use an O(n) algorithm instead
    result = list(x)
    for type in y:
        # Each branch uses for/else: if no existing member absorbs the new
        # type (no 'break'), it is appended as a distinct union member.
        if isinstance(type, Generic):
            # Same-name generics merge their type arguments pointwise.
            for i, rt in enumerate(result):
                if isinstance(rt, Generic) and type.typename == rt.typename:
                    result[i] = Generic(rt.typename,
                                        (combine_types(t, s)
                                         for t, s in zip(type.args, rt.args)))
                    break
            else:
                result.append(type)
        elif isinstance(type, Tuple):
            # Same-length tuples merge item types pointwise.
            for i, rt in enumerate(result):
                if isinstance(rt, Tuple) and len(type) == len(rt):
                    result[i] = Tuple(combine_types(t, s)
                                      for t, s in zip(type.itemtypes,
                                                      rt.itemtypes))
                    break
            else:
                result.append(type)
        elif type in numerics:
            # Numeric pairs widen: int ∪ float -> float, etc.
            for i, rt in enumerate(result):
                if rt in numerics:
                    result[i] = numerics[max(numerics.index(rt), numerics.index(type))]
                    break
            else:
                result.append(type)
        elif isinstance(type, Instance):
            for i, rt in enumerate(result):
                if isinstance(rt, Instance):
                    # Union[A, SubclassOfA] -> A
                    # Union[A, A] -> A, because issubclass(A, A) == True,
                    if issubclass(type.typeobj, rt.typeobj):
                        break
                    elif issubclass(rt.typeobj, type.typeobj):
                        result[i] = type
                        break
            else:
                result.append(type)
        elif type not in result:
            result.append(type)

    if len(result) > 1:
        return Union(result)
    else:
        return result[0]
+
+
class TypeBase(object):
    """Abstract base class of all type objects.

    Type objects lean heavily on isinstance tests -- they do not support
    duck typing well.
    """

    def __eq__(self, other):
        # Equal iff same concrete class and identical attribute values.
        if type(other) is not type(self):
            return False
        return all(getattr(other, attr) == getattr(self, attr)
                   for attr in self.__dict__)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return str(self)
+
+
class Instance(TypeBase):
    """A plain (non-generic) instance of a concrete class."""

    def __init__(self, typeobj):
        # Guard against accidentally wrapping one of our own type objects.
        assert not inspect.isclass(typeobj) or not issubclass(typeobj, TypeBase)
        self.typeobj = typeobj

    def __str__(self):
        # Compiled pattern/match classes have unhelpful internal names;
        # print the names typing.py uses instead.
        if self.typeobj == Pattern:
            return "Pattern"
        if self.typeobj == Match:
            return "Match"
        return self.typeobj.__name__

    def __repr__(self):
        return 'Instance(%s)' % self
+
+
class Generic(TypeBase):
    """A parameterized type, e.g. List[int] or Dict[str, float]."""

    def __init__(self, typename, args):
        self.typename = typename
        self.args = tuple(args)

    def __str__(self):
        rendered = ', '.join(str(a) for a in self.args)
        return '%s[%s]' % (self.typename, rendered)
+
+
class Tuple(TypeBase):
    """A fixed-length, per-position-typed tuple type."""

    def __init__(self, itemtypes):
        self.itemtypes = tuple(itemtypes)

    def __len__(self):
        return len(self.itemtypes)

    def __str__(self):
        return 'Tuple[%s]' % ', '.join(str(t) for t in self.itemtypes)
+
+
class Union(TypeBase):
    """An unordered union of two or more distinct types."""

    def __init__(self, types):
        assert len(types) > 1
        self.types = tuple(types)

    def __eq__(self, other):
        # Order-insensitive set equality over the member types.
        if type(other) is not Union:
            return False
        # TODO this is O(n**2); use an O(n) algorithm instead
        for t in self.types:
            if t not in other.types:
                return False
        for t in other.types:
            if t not in self.types:
                return False
        return True

    def __str__(self):
        types = list(self.types)
        if str != bytes:  # on Python 2 str == bytes
            if Instance(bytes) in types and Instance(str) in types:
                # collapse Union[bytes, str] -> AnyStr as late as possible so
                # we avoid corner cases like subclasses of bytes or str
                types.remove(Instance(bytes))
                types.remove(Instance(str))
                types.append(Instance(AnyStr))
        if len(types) == 1:
            return str(types[0])
        elif len(types) == 2 and None in types:
            # Union[X, None] prints as Optional[X].
            type = [t for t in types if t is not None][0]
            return 'Optional[%s]' % type
        else:
            return 'Union[%s]' % (', '.join(sorted(str(t) for t in types)))
+
+
class Unknown(TypeBase):
    """Placeholder for 'no information yet' (e.g. from an empty collection)."""

    def __str__(self):
        return 'Unknown'

    def __repr__(self):
        return 'Unknown()'
+
+
class Any(TypeBase):
    """The dynamic type: absorbs every other type in a union."""

    def __str__(self):
        return 'Any'

    def __repr__(self):
        return 'Any()'
+
+
# Marker classes used only as Instance() payloads so they print nicely.
class AnyStr(object): pass
class Callable(object): pass
import re
# Concrete classes of compiled patterns and match objects; their real names
# are implementation details, so capture them via type().
Pattern = type(re.compile(u''))
Match = type(re.match(u'', u''))
diff --git a/pinfer/test_pinfer.py b/pinfer/test_pinfer.py
new file mode 100644
index 0000000..d6168db
--- /dev/null
+++ b/pinfer/test_pinfer.py
@@ -0,0 +1,302 @@
+"""Test cases for the pinfer module"""
+
+import unittest
+
+from pinfer import Instance, Generic, Tuple, Union, Unknown
+import pinfer
+
+
+class TestInfer(unittest.TestCase):
+    """Tests for pinfer's type representations and runtime type inference."""
+    def setUp(self):
+        # Shared Instance fixtures used by most tests.
+        self.int = Instance(int)
+        self.float = Instance(float)
+
+    def tearDown(self):
+        # pinfer accumulates global inference state; clear it between tests.
+        pinfer.reset()
+
+    def test_instance(self):
+        i = self.int
+        self.assertEqual(i.typeobj, int)
+        self.assertEqual(str(i), 'int')
+        self.assertEqual(repr(i), 'Instance(int)')
+
+        self.assertTrue(i == Instance(int))
+        self.assertFalse(i != Instance(int))
+        self.assertTrue(i != self.float)
+        self.assertFalse(i == self.float)
+        self.assertNotEqual(i, None)
+
+    def test_generic_with_one_arg(self):
+        g = Generic('List', [self.int])
+        self.assertEqual(g.typename, 'List')
+        self.assertEqual(str(g.args), '(Instance(int),)')
+        self.assertEqual(str(g), 'List[int]')
+        self.assertEqual(repr(g), 'List[int]')
+
+        self.assertEqual(g, Generic('List', [self.int]))
+        self.assertNotEqual(g, Generic('Set', [self.int]))
+        self.assertNotEqual(g, Generic('List', [self.float]))
+        self.assertNotEqual(g, self.int)
+
+    def test_generic_with_two_args(self):
+        g = Generic('Dict', (self.int, self.float))
+        self.assertEqual(g.typename, 'Dict')
+        self.assertEqual(str(g), 'Dict[int, float]')
+
+    def test_tuple(self):
+        t0 = Tuple(())
+        t1 = Tuple([self.int])
+        t2 = Tuple((self.float, self.int))
+        self.assertEqual(t0.itemtypes, ())
+        self.assertEqual(str(t1.itemtypes[0]), 'int')
+        self.assertEqual(str(t2.itemtypes[0]), 'float')
+        self.assertEqual(str(t2.itemtypes[1]), 'int')
+        self.assertEqual(str(t0), 'Tuple[]')
+        self.assertEqual(str(t1), 'Tuple[int]')
+        self.assertEqual(str(t2), 'Tuple[float, int]')
+
+        self.assertEqual(t1, Tuple([self.int]))
+        self.assertNotEqual(t1, Tuple([self.float]))
+        self.assertNotEqual(t1, Tuple([self.int, self.int]))
+        self.assertNotEqual(t1, self.int)
+
+    def test_either(self):
+        i = self.int
+        f = self.float
+        s = Instance(str)
+
+        e2 = Union((i, f))
+        self.assertEqual(len(e2.types), 2)
+        self.assertEqual(str(e2), 'Union[float, int]')
+
+        # Union equality must be order-insensitive.
+        self.assertEqual(e2, Union((i, f)))
+        self.assertEqual(e2, Union((f, i)))
+        self.assertNotEqual(e2, Union((i, s)))
+        self.assertNotEqual(e2, Union((i, f, s)))
+        self.assertNotEqual(Union((i, f, s)), e2)
+        self.assertNotEqual(e2, i)
+
+    def test_either_as_optional(self):
+        # A two-member union containing None renders as Optional[...].
+        optint = Union((self.int, None))
+        self.assertEqual(str(optint), 'Optional[int]')
+        optfloat = Union((None, self.float))
+        self.assertEqual(str(optfloat), 'Optional[float]')
+        # Three or more members render as a plain (sorted) Union.
+        eithernone = Union((self.int, self.float, None))
+        self.assertEqual(str(eithernone), 'Union[None, float, int]')
+
+    def test_unknown(self):
+        unknown = Unknown()
+        self.assertEqual(str(unknown), 'Unknown')
+        self.assertEqual(repr(unknown), 'Unknown()')
+
+        self.assertEqual(unknown, Unknown())
+        self.assertNotEqual(unknown, self.int)
+
+    def test_combine_types(self):
+        i = self.int
+        f = self.float
+        s = Instance(str)
+        c = Instance(complex)
+        class Foo: pass
+        o = Instance(Foo)
+
+        # Simple types
+        self.assert_combine(i, i, i)
+        self.assert_combine(s, s, s)
+        self.assert_combine(i, s, Union((i, s)))
+        self.assert_combine(i, None, Union((i, None)))
+        # Unknowns
+        self.assert_combine(i, Unknown(), i)
+        self.assert_combine(Unknown(), Unknown(), Unknown())
+        # Union types
+        self.assert_combine(o, Union((f, s)), Union((o, f, s)))
+        self.assert_combine(i, Union((i, s)), Union((i, s)))
+        self.assert_combine(Union((o, f)), Union((o, s)), Union((o, f, s)))
+        # Tuple types
+        self.assert_combine(Tuple([i, i]), Tuple([i, i]), Tuple([i, i]))
+        self.assert_combine(Tuple([i, i]), Tuple([o, s]),
+                            Tuple([Union([o, i]), Union([s, i])]))
+        # Numeric types (combine widens int -> float -> complex)
+        self.assert_combine(i, f, f)
+        self.assert_combine(i, c, c)
+        self.assert_combine(c, f, c)
+        # Unions with numerics
+        self.assert_combine(i, Union((o, f)), Union((o, f)))
+        self.assert_combine(Union((o, f)), i, Union((o, f)))
+        self.assert_combine(Union((o, i)), f, Union((o, f)))
+        # Tuples with numerics
+        self.assert_combine(Tuple([i, i]), Tuple([f, i]), Tuple([f, i]))
+        self.assert_combine(Tuple([i, i]), Tuple([f, o]), Tuple([f, Union((i, o))]))
+        self.assert_combine(Tuple([f, i]), Tuple([i, o]), Tuple([f, Union((i, o))]))
+
+    def test_combine_special_cases(self):
+        i = self.int
+        f = self.float
+        u = Unknown()
+        def list_(x):
+            return Generic('List', [x])
+        # Simplify generic types.
+        self.assert_combine(list_(i), list_(u), list_(i))
+
+    def assert_combine(self, t, s, combined):
+        # combine_types should be commutative; check both argument orders.
+        self.assertEqual(pinfer.combine_types(t, s), combined)
+        self.assertEqual(pinfer.combine_types(s, t), combined)
+
+    def test_sample(self):
+        sample = pinfer.sample
+        # Small collections are sampled in full, preserving order.
+        self.assertEqual(sample(()), [])
+        self.assertEqual(sample((1, 2)), [1, 2])
+        self.assertEqual(sample([]), [])
+        self.assertEqual(sample([1]), [1])
+        self.assertEqual(sample([1, 2]), [1, 2])
+        # TODO larger collections
+
+    def test_infer_simple_value_type(self):
+        self.assert_infer_type(1, 'int')
+        self.assert_infer_type('', 'str')
+        self.assert_infer_type(None, 'None')
+
+    def test_infer_collection_type(self):
+        # List
+        self.assert_infer_type([], 'List[Unknown]')
+        self.assert_infer_type([1], 'List[int]')
+        self.assert_infer_type([1, None], 'List[Optional[int]]')
+        # Dict
+        self.assert_infer_type({1: 'x', 2: None},
+                               'Dict[int, Optional[str]]')
+        # Set
+        self.assert_infer_type({1, None}, 'Set[Optional[int]]')
+        # Tuple
+        self.assert_infer_type((1, 'x'), 'Tuple[int, str]')
+        # Long homogeneous tuples are summarized as TupleSequence[...].
+        self.assert_infer_type((1, None) * 100, 'TupleSequence[Optional[int]]')
+
+    def assert_infer_type(self, value, type):
+        self.assertEqual(str(pinfer.infer_value_type(value)), type)
+
+    def test_infer_variables(self):
+        pinfer.infer_var('x', 1)
+        self.assert_infer_state('x: int')
+        # Repeated observations of the same variable are merged.
+        pinfer.infer_var('x', 1)
+        pinfer.infer_var('x', None)
+        pinfer.infer_var('y', 1.1)
+        self.assert_infer_state('x: Optional[int]\n'
+                                'y: float')
+
+    def test_infer_instance_var(self):
+        class A: pass
+        a = A()
+        a.x = 1
+        a.y = 'x'
+        pinfer.infer_attrs(a)
+        self.assert_infer_state('A.x: int\n'
+                                'A.y: str')
+
+    def test_infer_class_var(self):
+        class A:
+            x = 1.1
+        pinfer.infer_attrs(A())
+        self.assert_infer_state('A.x: float')
+
+    def test_infer_function_attr(self):
+        class A:
+            def f(self): pass
+        a = A()
+        a.g = lambda x: 1
+        pinfer.infer_attrs(a)
+        # Bound methods are skipped; only the callable attribute is reported.
+        self.assert_infer_state('A.g: Callable')
+
+    def test_infer_simple_function_signature(self):
+        @pinfer.infer_signature
+        def f(a):
+            return 'x'
+        f(1)
+        f(None)
+        # The wrapper must preserve the function's name.
+        self.assertEqual(f.__name__, 'f')
+        self.assert_infer_state('def f(a: Optional[int]) -> str')
+
+    def test_infer_function_with_two_args(self):
+        @pinfer.infer_signature
+        def f(x, y):
+            return x * y
+        f(1, 2)
+        f(1, 'x')
+        self.assert_infer_state(
+            'def f(x: int, y: Union[int, str]) -> Union[int, str]')
+
+    def test_infer_method(self):
+        class A:
+            @pinfer.infer_signature
+            def f(self, x): pass
+        A().f('x')
+        self.assert_infer_state('def f(self, x: str) -> None')
+
+    def test_infer_default_arg_values(self):
+        @pinfer.infer_signature
+        def f(x=1, y=None): pass
+        f()
+        self.assert_infer_state('def f(x: int, y: None) -> None')
+        f('x')
+        f('x', 1.1)
+        f()
+        self.assert_infer_state(
+            'def f(x: Union[int, str], y: Optional[float]) -> None')
+
+    def test_infer_varargs(self):
+        @pinfer.infer_signature
+        def f(x, *y): pass
+        f(1)
+        f(1, 'x', None)
+        self.assert_infer_state('def f(x: int, *y: Optional[str]) -> None')
+        # With no varargs observed, *y stays Unknown.
+        f(1)
+        self.assert_infer_state('def f(x: int, *y: Unknown) -> None')
+
+    def test_infer_keyword_args(self):
+        @pinfer.infer_signature
+        def f(x): pass
+        f(x=1)
+        self.assert_infer_state('def f(x: int) -> None')
+
+        @pinfer.infer_signature
+        def f(x='x'): pass
+        f(x=1)
+        self.assert_infer_state('def f(x: int) -> None')
+
+    def test_infer_keyword_varargs(self):
+        @pinfer.infer_signature
+        def f(a, **kwargs): pass
+        f(None, x=1, y='x')
+        self.assert_infer_state(
+            'def f(a: None, **kwargs: Union[int, str]) -> None')
+
+    def test_infer_class(self):
+        @pinfer.infer_class
+        class A:
+            def f(self, x): return 0
+        A().f('x')
+        self.assert_infer_state('class A(...):\n'
+                                '    def f(self, x: str) -> int')
+
+        # Multiple instrumented classes are reported together.
+        @pinfer.infer_class
+        class A:
+            def f(self, x): return 0
+        @pinfer.infer_class
+        class B:
+            def f(self): pass
+            def g(self): pass
+        A().f('')
+        B().f()
+        B().g()
+        self.assert_infer_state('class A(...):\n'
+                                '    def f(self, x: str) -> int\n'
+                                'class B(...):\n'
+                                '    def f(self) -> None\n'
+                                '    def g(self) -> None')
+
+    def assert_infer_state(self, expected):
+        # Compare the formatted global inference state, then reset it so
+        # follow-up assertions in the same test start from a clean slate.
+        state = pinfer.format_state()
+        self.assertEqual(state, expected)
+        pinfer.reset()
+
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pinfer/test_pinfer3.py b/pinfer/test_pinfer3.py
new file mode 100644
index 0000000..688e8c0
--- /dev/null
+++ b/pinfer/test_pinfer3.py
@@ -0,0 +1,31 @@
+""" test cases that require python3 syntax """
+
+import unittest
+import pinfer
+
+# Include all of the shared unit tests
+from test_pinfer import TestInfer
+
+
+class TestInfer3(unittest.TestCase):
+    """Inference tests that need Python 3-only syntax (keyword-only args)."""
+    def test_infer_keyword_only_args(self):
+        # decorators break the parsing
+        # (so wrap with infer_signature manually instead of using '@')
+        def f(x, *, y=0): pass
+        f = pinfer.infer_signature(f)
+        f(1, y='x')
+        self.assert_infer_state(
+            'def f(x: int, *, y: str = 0) -> None')
+
+        def f(*, x=None, y=None): pass
+        f = pinfer.infer_signature(f)
+        f(y='x')
+        self.assert_infer_state(
+            'def f(*, x: None = None, y: str = None) -> None')
+
+    def assert_infer_state(self, expected):
+        # Compare the formatted global inference state, then reset it.
+        state = pinfer.format_state()
+        self.assertEqual(state, expected)
+        pinfer.reset()
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pinfer/unparse.py b/pinfer/unparse.py
new file mode 100644
index 0000000..6e1e493
--- /dev/null
+++ b/pinfer/unparse.py
@@ -0,0 +1,610 @@
+# From Python 2's Demo/parser/unparse.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+
+"Usage: unparse.py <path to source file>"
+import sys
+import ast
+import cStringIO
+import os
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR: an exponent one past the largest
+# representable, so the literal overflows back to infinity when re-parsed.
+INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+def interleave(inter, f, seq):
+    """Call f on each item in seq, calling inter() in between.
+    """
+    seq = iter(seq)
+    try:
+        f(next(seq))
+    except StopIteration:
+        # Empty sequence: emit nothing at all.
+        pass
+    else:
+        # First item already emitted; prefix every later item with inter().
+        for x in seq:
+            inter()
+            f(x)
+
+class Unparser:
+    """Methods in this class recursively traverse an AST and
+    output source code for the abstract syntax; original formatting
+    is disregarded. """
+
+    # NOTE: this version targets the Python 2 grammar; it handles py2-only
+    # nodes (Print, Exec, Repr, three-expression raise) and expects
+    # py2-style string names in 'arguments.vararg' / 'arguments.kwarg'.
+    def __init__(self, tree, file = sys.stdout):
+        """Unparser(tree, file=sys.stdout) -> None.
+         Print the source for tree to file."""
+        self.f = file
+        # Names imported from __future__; consulted by _Str to decide how
+        # to prefix string literals (see the unicode_literals handling).
+        self.future_imports = []
+        self._indent = 0
+        self.dispatch(tree)
+        self.f.write("")
+        self.f.flush()
+
+    def fill(self, text = ""):
+        "Indent a piece of text, according to the current indentation level"
+        self.f.write("\n"+"    "*self._indent + text)
+
+    def write(self, text):
+        "Append a piece of text to the current line."
+        self.f.write(text)
+
+    def enter(self):
+        "Print ':', and increase the indentation."
+        self.write(":")
+        self._indent += 1
+
+    def leave(self):
+        "Decrease the indentation level."
+        self._indent -= 1
+
+    def dispatch(self, tree):
+        "Dispatcher function, dispatching tree type T to method _T."
+        if isinstance(tree, list):
+            for t in tree:
+                self.dispatch(t)
+            return
+        meth = getattr(self, "_"+tree.__class__.__name__)
+        meth(tree)
+
+
+    ############### Unparsing methods ######################
+    # There should be one method per concrete grammar type #
+    # Constructors should be grouped by sum type. Ideally, #
+    # this would follow the order in the grammar, but      #
+    # currently doesn't.                                   #
+    ########################################################
+
+    def _Module(self, tree):
+        for stmt in tree.body:
+            self.dispatch(stmt)
+
+    # stmt
+    def _Expr(self, tree):
+        self.fill()
+        self.dispatch(tree.value)
+
+    def _Import(self, t):
+        self.fill("import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _ImportFrom(self, t):
+        # A from __future__ import may affect unparsing, so record it.
+        if t.module and t.module == '__future__':
+            self.future_imports.extend(n.name for n in t.names)
+
+        self.fill("from ")
+        self.write("." * t.level)
+        if t.module:
+            self.write(t.module)
+        self.write(" import ")
+        interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _Assign(self, t):
+        self.fill()
+        # Chained assignment: emit every target followed by ' = '.
+        for target in t.targets:
+            self.dispatch(target)
+            self.write(" = ")
+        self.dispatch(t.value)
+
+    def _AugAssign(self, t):
+        self.fill()
+        self.dispatch(t.target)
+        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
+        self.dispatch(t.value)
+
+    def _Return(self, t):
+        self.fill("return")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+
+    def _Pass(self, t):
+        self.fill("pass")
+
+    def _Break(self, t):
+        self.fill("break")
+
+    def _Continue(self, t):
+        self.fill("continue")
+
+    def _Delete(self, t):
+        self.fill("del ")
+        interleave(lambda: self.write(", "), self.dispatch, t.targets)
+
+    def _Assert(self, t):
+        self.fill("assert ")
+        self.dispatch(t.test)
+        if t.msg:
+            self.write(", ")
+            self.dispatch(t.msg)
+
+    # py2-only 'exec' statement.
+    def _Exec(self, t):
+        self.fill("exec ")
+        self.dispatch(t.body)
+        if t.globals:
+            self.write(" in ")
+            self.dispatch(t.globals)
+        if t.locals:
+            self.write(", ")
+            self.dispatch(t.locals)
+
+    # py2-only 'print' statement (with optional '>>dest' and trailing comma).
+    def _Print(self, t):
+        self.fill("print ")
+        do_comma = False
+        if t.dest:
+            self.write(">>")
+            self.dispatch(t.dest)
+            do_comma = True
+        for e in t.values:
+            if do_comma:self.write(", ")
+            else:do_comma=True
+            self.dispatch(e)
+        if not t.nl:
+            self.write(",")
+
+    def _Global(self, t):
+        self.fill("global ")
+        interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Yield(self, t):
+        # Parenthesized so the yield expression is safe in any context.
+        self.write("(")
+        self.write("yield")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    # py2 three-expression raise: 'raise type, inst, traceback'.
+    def _Raise(self, t):
+        self.fill('raise ')
+        if t.type:
+            self.dispatch(t.type)
+        if t.inst:
+            self.write(", ")
+            self.dispatch(t.inst)
+        if t.tback:
+            self.write(", ")
+            self.dispatch(t.tback)
+
+    def _TryExcept(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+        for ex in t.handlers:
+            self.dispatch(ex)
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _TryFinally(self, t):
+        if len(t.body) == 1 and isinstance(t.body[0], ast.TryExcept):
+            # try-except-finally
+            self.dispatch(t.body)
+        else:
+            self.fill("try")
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+
+        self.fill("finally")
+        self.enter()
+        self.dispatch(t.finalbody)
+        self.leave()
+
+    def _ExceptHandler(self, t):
+        self.fill("except")
+        if t.type:
+            self.write(" ")
+            self.dispatch(t.type)
+        if t.name:
+            self.write(" as ")
+            self.dispatch(t.name)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _ClassDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("class "+t.name)
+        if t.bases:
+            self.write("(")
+            for a in t.bases:
+                self.dispatch(a)
+                self.write(", ")
+            self.write(")")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _FunctionDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("def "+t.name + "(")
+        self.dispatch(t.args)
+        self.write(")")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _For(self, t):
+        self.fill("for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _If(self, t):
+        self.fill("if ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        # collapse nested ifs into equivalent elifs.
+        while (t.orelse and len(t.orelse) == 1 and
+               isinstance(t.orelse[0], ast.If)):
+            t = t.orelse[0]
+            self.fill("elif ")
+            self.dispatch(t.test)
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+        # final else
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _While(self, t):
+        self.fill("while ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _With(self, t):
+        self.fill("with ")
+        self.dispatch(t.context_expr)
+        if t.optional_vars:
+            self.write(" as ")
+            self.dispatch(t.optional_vars)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    # expr
+    def _Str(self, tree):
+        # if from __future__ import unicode_literals is in effect,
+        # then we want to output string literals using a 'b' prefix
+        # and unicode literals with no prefix.
+        if "unicode_literals" not in self.future_imports:
+            self.write(repr(tree.s))
+        elif isinstance(tree.s, str):
+            self.write("b" + repr(tree.s))
+        elif isinstance(tree.s, unicode):
+            self.write(repr(tree.s).lstrip("u"))
+        else:
+            assert False, "shouldn't get here"
+
+    def _Name(self, t):
+        self.write(t.id)
+
+    # py2-only backtick repr expression: `x`.
+    def _Repr(self, t):
+        self.write("`")
+        self.dispatch(t.value)
+        self.write("`")
+
+    def _Num(self, t):
+        repr_n = repr(t.n)
+        # Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2.
+        if repr_n.startswith("-"):
+            self.write("(")
+        # Substitute overflowing decimal literal for AST infinities.
+        self.write(repr_n.replace("inf", INFSTR))
+        if repr_n.startswith("-"):
+            self.write(")")
+
+    def _List(self, t):
+        self.write("[")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("]")
+
+    def _ListComp(self, t):
+        self.write("[")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("]")
+
+    def _GeneratorExp(self, t):
+        self.write("(")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write(")")
+
+    def _SetComp(self, t):
+        self.write("{")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _DictComp(self, t):
+        self.write("{")
+        self.dispatch(t.key)
+        self.write(": ")
+        self.dispatch(t.value)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _comprehension(self, t):
+        self.write(" for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        for if_clause in t.ifs:
+            self.write(" if ")
+            self.dispatch(if_clause)
+
+    def _IfExp(self, t):
+        self.write("(")
+        self.dispatch(t.body)
+        self.write(" if ")
+        self.dispatch(t.test)
+        self.write(" else ")
+        self.dispatch(t.orelse)
+        self.write(")")
+
+    def _Set(self, t):
+        assert(t.elts) # should be at least one element
+        self.write("{")
+        interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("}")
+
+    def _Dict(self, t):
+        self.write("{")
+        def write_pair(pair):
+            (k, v) = pair
+            self.dispatch(k)
+            self.write(": ")
+            self.dispatch(v)
+        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
+        self.write("}")
+
+    def _Tuple(self, t):
+        self.write("(")
+        if len(t.elts) == 1:
+            # A one-element tuple needs its trailing comma: (x,).
+            (elt,) = t.elts
+            self.dispatch(elt)
+            self.write(",")
+        else:
+            interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write(")")
+
+    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
+    def _UnaryOp(self, t):
+        self.write("(")
+        self.write(self.unop[t.op.__class__.__name__])
+        self.write(" ")
+        # If we're applying unary minus to a number, parenthesize the number.
+        # This is necessary: -2147483648 is different from -(2147483648) on
+        # a 32-bit machine (the first is an int, the second a long), and
+        # -7j is different from -(7j).  (The first has real part 0.0, the second
+        # has real part -0.0.)
+        if isinstance(t.op, ast.USub) and isinstance(t.operand, ast.Num):
+            self.write("(")
+            self.dispatch(t.operand)
+            self.write(")")
+        else:
+            self.dispatch(t.operand)
+        self.write(")")
+
+    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
+                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
+                    "FloorDiv":"//", "Pow": "**"}
+    def _BinOp(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
+        self.dispatch(t.right)
+        self.write(")")
+
+    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
+                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
+    def _Compare(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        # Chained comparisons: ops and comparators run in lockstep.
+        for o, e in zip(t.ops, t.comparators):
+            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+            self.dispatch(e)
+        self.write(")")
+
+    boolops = {ast.And: 'and', ast.Or: 'or'}
+    def _BoolOp(self, t):
+        self.write("(")
+        s = " %s " % self.boolops[t.op.__class__]
+        interleave(lambda: self.write(s), self.dispatch, t.values)
+        self.write(")")
+
+    def _Attribute(self,t):
+        self.dispatch(t.value)
+        # Special case: 3.__abs__() is a syntax error, so if t.value
+        # is an integer literal then we need to either parenthesize
+        # it or add an extra space to get 3 .__abs__().
+        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
+            self.write(" ")
+        self.write(".")
+        self.write(t.attr)
+
+    def _Call(self, t):
+        self.dispatch(t.func)
+        self.write("(")
+        comma = False
+        for e in t.args:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma: self.write(", ")
+            else: comma = True
+            self.dispatch(e)
+        # py2 AST keeps *args / **kwargs as separate Call fields.
+        if t.starargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("*")
+            self.dispatch(t.starargs)
+        if t.kwargs:
+            if comma: self.write(", ")
+            else: comma = True
+            self.write("**")
+            self.dispatch(t.kwargs)
+        self.write(")")
+
+    def _Subscript(self, t):
+        self.dispatch(t.value)
+        self.write("[")
+        self.dispatch(t.slice)
+        self.write("]")
+
+    # slice
+    def _Ellipsis(self, t):
+        self.write("...")
+
+    def _Index(self, t):
+        self.dispatch(t.value)
+
+    def _Slice(self, t):
+        if t.lower:
+            self.dispatch(t.lower)
+        self.write(":")
+        if t.upper:
+            self.dispatch(t.upper)
+        if t.step:
+            self.write(":")
+            self.dispatch(t.step)
+
+    def _ExtSlice(self, t):
+        interleave(lambda: self.write(', '), self.dispatch, t.dims)
+
+    # others
+    def _arguments(self, t):
+        first = True
+        # normal arguments
+        # Defaults align with the *last* len(t.defaults) arguments; pad the
+        # front with None so zip pairs each arg with its default (or None).
+        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
+        for a,d in zip(t.args, defaults):
+            if first:first = False
+            else: self.write(", ")
+            self.dispatch(a),
+            if d:
+                self.write("=")
+                self.dispatch(d)
+
+        # varargs
+        if t.vararg:
+            if first:first = False
+            else: self.write(", ")
+            self.write("*")
+            self.write(t.vararg)
+
+        # kwargs
+        if t.kwarg:
+            if first:first = False
+            else: self.write(", ")
+            self.write("**"+t.kwarg)
+
+    def _keyword(self, t):
+        self.write(t.arg)
+        self.write("=")
+        self.dispatch(t.value)
+
+    def _Lambda(self, t):
+        self.write("(")
+        self.write("lambda ")
+        self.dispatch(t.args)
+        self.write(": ")
+        self.dispatch(t.body)
+        self.write(")")
+
+    def _alias(self, t):
+        self.write(t.name)
+        if t.asname:
+            self.write(" as "+t.asname)
+
+def roundtrip(filename, output=sys.stdout):
+    """Parse the given source file and unparse its AST to 'output'."""
+    with open(filename, "r") as pyfile:
+        source = pyfile.read()
+    # PyCF_ONLY_AST makes compile() return the AST instead of bytecode.
+    tree = compile(source, filename, "exec", ast.PyCF_ONLY_AST)
+    Unparser(tree, output)
+
+
+
+def testdir(a):
+    """Round-trip every .py file under directory 'a', recursively.
+
+    Output is written to an in-memory buffer and discarded; this only
+    checks that unparsing does not raise.
+    """
+    try:
+        names = [n for n in os.listdir(a) if n.endswith('.py')]
+    except OSError:
+        sys.stderr.write("Directory not readable: %s" % a)
+    else:
+        for n in names:
+            fullname = os.path.join(a, n)
+            if os.path.isfile(fullname):
+                output = cStringIO.StringIO()
+                print 'Testing %s' % fullname
+                try:
+                    roundtrip(fullname, output)
+                except Exception as e:
+                    # Best-effort scan: report and continue with other files.
+                    print '  Failed to compile, exception is %s' % repr(e)
+            elif os.path.isdir(fullname):
+                testdir(fullname)
+
+def main(args):
+    """CLI entry: '--testdir DIR...' scans directories, otherwise each
+    argument is a source file to unparse to stdout."""
+    if args[0] == '--testdir':
+        for a in args[1:]:
+            testdir(a)
+    else:
+        for a in args:
+            roundtrip(a)
+
+# Script entry point; see the usage docstring at the top of the file.
+if __name__=='__main__':
+    main(sys.argv[1:])
diff --git a/pinfer/unparse3.py b/pinfer/unparse3.py
new file mode 100644
index 0000000..0936cb2
--- /dev/null
+++ b/pinfer/unparse3.py
@@ -0,0 +1,610 @@
+# From Python 3's Tools/parser/unparse.py
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved
+
+"Usage: unparse.py <path to source file>"
+import sys
+import ast
+import tokenize
+import io
+import os
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
def interleave(inter, f, seq):
    """Call f on each item of seq, invoking inter() between items."""
    iterator = iter(seq)
    sentinel = object()
    head = next(iterator, sentinel)
    if head is sentinel:
        # Empty sequence: nothing to emit.
        return
    f(head)
    for item in iterator:
        inter()
        f(item)
+
class Unparser:
    """Methods in this class recursively traverse an AST and
    output source code for the abstract syntax; original formatting
    is disregarded.

    Fixed relative to the vendored copy:
    - Call/ClassDef no longer assume the pre-3.5 ``starargs``/``kwargs``
      AST attributes (removed in Python 3.5).
    - ``_keyword`` handles ``arg=None`` (how 3.5+ encodes ``**expr``).
    - A ``_Constant`` handler supports ASTs produced by Python 3.8+,
      where all literals parse as ``ast.Constant``.
    """

    def __init__(self, tree, file = sys.stdout):
        """Unparser(tree, file=sys.stdout) -> None.
         Print the source for tree to file."""
        self.f = file
        self._indent = 0
        self.dispatch(tree)
        print("", file=self.f)
        self.f.flush()

    def fill(self, text = ""):
        "Indent a piece of text, according to the current indentation level"
        self.f.write("\n"+"    "*self._indent + text)

    def write(self, text):
        "Append a piece of text to the current line."
        self.f.write(text)

    def enter(self):
        "Print ':', and increase the indentation."
        self.write(":")
        self._indent += 1

    def leave(self):
        "Decrease the indentation level."
        self._indent -= 1

    def dispatch(self, tree):
        "Dispatcher function, dispatching tree type T to method _T."
        if isinstance(tree, list):
            for t in tree:
                self.dispatch(t)
            return
        meth = getattr(self, "_"+tree.__class__.__name__)
        meth(tree)


    ############### Unparsing methods ######################
    # There should be one method per concrete grammar type #
    # Constructors should be grouped by sum type. Ideally, #
    # this would follow the order in the grammar, but      #
    # currently doesn't.                                   #
    ########################################################

    def _Module(self, tree):
        for stmt in tree.body:
            self.dispatch(stmt)

    # stmt
    def _Expr(self, tree):
        self.fill()
        self.dispatch(tree.value)

    def _Import(self, t):
        self.fill("import ")
        interleave(lambda: self.write(", "), self.dispatch, t.names)

    def _ImportFrom(self, t):
        self.fill("from ")
        self.write("." * t.level)
        if t.module:
            self.write(t.module)
        self.write(" import ")
        interleave(lambda: self.write(", "), self.dispatch, t.names)

    def _Assign(self, t):
        self.fill()
        for target in t.targets:
            self.dispatch(target)
            self.write(" = ")
        self.dispatch(t.value)

    def _AugAssign(self, t):
        self.fill()
        self.dispatch(t.target)
        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
        self.dispatch(t.value)

    def _Return(self, t):
        self.fill("return")
        if t.value:
            self.write(" ")
            self.dispatch(t.value)

    def _Pass(self, t):
        self.fill("pass")

    def _Break(self, t):
        self.fill("break")

    def _Continue(self, t):
        self.fill("continue")

    def _Delete(self, t):
        self.fill("del ")
        interleave(lambda: self.write(", "), self.dispatch, t.targets)

    def _Assert(self, t):
        self.fill("assert ")
        self.dispatch(t.test)
        if t.msg:
            self.write(", ")
            self.dispatch(t.msg)

    def _Global(self, t):
        self.fill("global ")
        interleave(lambda: self.write(", "), self.write, t.names)

    def _Nonlocal(self, t):
        self.fill("nonlocal ")
        interleave(lambda: self.write(", "), self.write, t.names)

    def _Yield(self, t):
        # Parenthesized so the result is valid in any expression context.
        self.write("(")
        self.write("yield")
        if t.value:
            self.write(" ")
            self.dispatch(t.value)
        self.write(")")

    def _YieldFrom(self, t):
        self.write("(")
        self.write("yield from")
        if t.value:
            self.write(" ")
            self.dispatch(t.value)
        self.write(")")

    def _Raise(self, t):
        self.fill("raise")
        if not t.exc:
            # A bare `raise` cannot carry a cause.
            assert not t.cause
            return
        self.write(" ")
        self.dispatch(t.exc)
        if t.cause:
            self.write(" from ")
            self.dispatch(t.cause)

    def _Try(self, t):
        self.fill("try")
        self.enter()
        self.dispatch(t.body)
        self.leave()
        for ex in t.handlers:
            self.dispatch(ex)
        if t.orelse:
            self.fill("else")
            self.enter()
            self.dispatch(t.orelse)
            self.leave()
        if t.finalbody:
            self.fill("finally")
            self.enter()
            self.dispatch(t.finalbody)
            self.leave()

    def _ExceptHandler(self, t):
        self.fill("except")
        if t.type:
            self.write(" ")
            self.dispatch(t.type)
        if t.name:
            self.write(" as ")
            self.write(t.name)
        self.enter()
        self.dispatch(t.body)
        self.leave()

    def _ClassDef(self, t):
        self.write("\n")
        for deco in t.decorator_list:
            self.fill("@")
            self.dispatch(deco)
        self.fill("class "+t.name)
        self.write("(")
        comma = False
        for e in t.bases:
            if comma: self.write(", ")
            else: comma = True
            self.dispatch(e)
        for e in t.keywords:
            if comma: self.write(", ")
            else: comma = True
            self.dispatch(e)
        # `starargs`/`kwargs` were removed from ast.ClassDef in Python 3.5;
        # newer ASTs encode them as Starred bases / arg-less keywords instead.
        starargs = getattr(t, "starargs", None)
        kwargs = getattr(t, "kwargs", None)
        if starargs:
            if comma: self.write(", ")
            else: comma = True
            self.write("*")
            self.dispatch(starargs)
        if kwargs:
            if comma: self.write(", ")
            else: comma = True
            self.write("**")
            self.dispatch(kwargs)
        self.write(")")

        self.enter()
        self.dispatch(t.body)
        self.leave()

    def _FunctionDef(self, t):
        self.write("\n")
        for deco in t.decorator_list:
            self.fill("@")
            self.dispatch(deco)
        self.fill("def "+t.name + "(")
        self.dispatch(t.args)
        self.write(")")
        if t.returns:
            self.write(" -> ")
            self.dispatch(t.returns)
        self.enter()
        self.dispatch(t.body)
        self.leave()

    def _For(self, t):
        self.fill("for ")
        self.dispatch(t.target)
        self.write(" in ")
        self.dispatch(t.iter)
        self.enter()
        self.dispatch(t.body)
        self.leave()
        if t.orelse:
            self.fill("else")
            self.enter()
            self.dispatch(t.orelse)
            self.leave()

    def _If(self, t):
        self.fill("if ")
        self.dispatch(t.test)
        self.enter()
        self.dispatch(t.body)
        self.leave()
        # collapse nested ifs into equivalent elifs.
        while (t.orelse and len(t.orelse) == 1 and
               isinstance(t.orelse[0], ast.If)):
            t = t.orelse[0]
            self.fill("elif ")
            self.dispatch(t.test)
            self.enter()
            self.dispatch(t.body)
            self.leave()
        # final else
        if t.orelse:
            self.fill("else")
            self.enter()
            self.dispatch(t.orelse)
            self.leave()

    def _While(self, t):
        self.fill("while ")
        self.dispatch(t.test)
        self.enter()
        self.dispatch(t.body)
        self.leave()
        if t.orelse:
            self.fill("else")
            self.enter()
            self.dispatch(t.orelse)
            self.leave()

    def _With(self, t):
        self.fill("with ")
        interleave(lambda: self.write(", "), self.dispatch, t.items)
        self.enter()
        self.dispatch(t.body)
        self.leave()

    # expr
    def _Bytes(self, t):
        self.write(repr(t.s))

    def _Str(self, tree):
        self.write(repr(tree.s))

    def _Name(self, t):
        self.write(t.id)

    def _NameConstant(self, t):
        self.write(repr(t.value))

    def _Num(self, t):
        # Substitute overflowing decimal literal for AST infinities.
        self.write(repr(t.n).replace("inf", INFSTR))

    def _Constant(self, t):
        # Python 3.8+ parses every literal as ast.Constant; route it to the
        # same output the specialized handlers above would produce.
        value = t.value
        if value is Ellipsis:
            self.write("...")
        elif isinstance(value, (int, float, complex)) and not isinstance(value, bool):
            rep = repr(value)
            if "inf" in rep:
                # Substitute overflowing decimal literal for AST infinities.
                rep = rep.replace("inf", INFSTR)
            self.write(rep)
        else:
            # str, bytes, bool, None: repr() is already valid source.
            self.write(repr(value))

    def _List(self, t):
        self.write("[")
        interleave(lambda: self.write(", "), self.dispatch, t.elts)
        self.write("]")

    def _ListComp(self, t):
        self.write("[")
        self.dispatch(t.elt)
        for gen in t.generators:
            self.dispatch(gen)
        self.write("]")

    def _GeneratorExp(self, t):
        self.write("(")
        self.dispatch(t.elt)
        for gen in t.generators:
            self.dispatch(gen)
        self.write(")")

    def _SetComp(self, t):
        self.write("{")
        self.dispatch(t.elt)
        for gen in t.generators:
            self.dispatch(gen)
        self.write("}")

    def _DictComp(self, t):
        self.write("{")
        self.dispatch(t.key)
        self.write(": ")
        self.dispatch(t.value)
        for gen in t.generators:
            self.dispatch(gen)
        self.write("}")

    def _comprehension(self, t):
        self.write(" for ")
        self.dispatch(t.target)
        self.write(" in ")
        self.dispatch(t.iter)
        for if_clause in t.ifs:
            self.write(" if ")
            self.dispatch(if_clause)

    def _IfExp(self, t):
        self.write("(")
        self.dispatch(t.body)
        self.write(" if ")
        self.dispatch(t.test)
        self.write(" else ")
        self.dispatch(t.orelse)
        self.write(")")

    def _Set(self, t):
        assert(t.elts) # should be at least one element
        self.write("{")
        interleave(lambda: self.write(", "), self.dispatch, t.elts)
        self.write("}")

    def _Dict(self, t):
        self.write("{")
        def write_pair(pair):
            (k, v) = pair
            self.dispatch(k)
            self.write(": ")
            self.dispatch(v)
        interleave(lambda: self.write(", "), write_pair, zip(t.keys, t.values))
        self.write("}")

    def _Tuple(self, t):
        self.write("(")
        if len(t.elts) == 1:
            # One-element tuples need the trailing comma.
            (elt,) = t.elts
            self.dispatch(elt)
            self.write(",")
        else:
            interleave(lambda: self.write(", "), self.dispatch, t.elts)
        self.write(")")

    unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
    def _UnaryOp(self, t):
        self.write("(")
        self.write(self.unop[t.op.__class__.__name__])
        self.write(" ")
        self.dispatch(t.operand)
        self.write(")")

    binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
                    "LShift":"<<", "RShift":">>", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
                    "FloorDiv":"//", "Pow": "**"}
    def _BinOp(self, t):
        self.write("(")
        self.dispatch(t.left)
        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
        self.dispatch(t.right)
        self.write(")")

    cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
                        "Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
    def _Compare(self, t):
        self.write("(")
        self.dispatch(t.left)
        for o, e in zip(t.ops, t.comparators):
            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
            self.dispatch(e)
        self.write(")")

    boolops = {ast.And: 'and', ast.Or: 'or'}
    def _BoolOp(self, t):
        self.write("(")
        s = " %s " % self.boolops[t.op.__class__]
        interleave(lambda: self.write(s), self.dispatch, t.values)
        self.write(")")

    def _Attribute(self,t):
        self.dispatch(t.value)
        # Special case: 3.__abs__() is a syntax error, so if t.value
        # is an integer literal then we need to either parenthesize
        # it or add an extra space to get 3 .__abs__().
        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
            self.write(" ")
        self.write(".")
        self.write(t.attr)

    def _Call(self, t):
        self.dispatch(t.func)
        self.write("(")
        comma = False
        for e in t.args:
            if comma: self.write(", ")
            else: comma = True
            self.dispatch(e)
        for e in t.keywords:
            if comma: self.write(", ")
            else: comma = True
            self.dispatch(e)
        # `starargs`/`kwargs` were removed from ast.Call in Python 3.5;
        # newer ASTs put Starred args in `args` and **kwargs in `keywords`.
        starargs = getattr(t, "starargs", None)
        kwargs = getattr(t, "kwargs", None)
        if starargs:
            if comma: self.write(", ")
            else: comma = True
            self.write("*")
            self.dispatch(starargs)
        if kwargs:
            if comma: self.write(", ")
            else: comma = True
            self.write("**")
            self.dispatch(kwargs)
        self.write(")")

    def _Subscript(self, t):
        self.dispatch(t.value)
        self.write("[")
        self.dispatch(t.slice)
        self.write("]")

    def _Starred(self, t):
        self.write("*")
        self.dispatch(t.value)

    # slice
    def _Ellipsis(self, t):
        self.write("...")

    def _Index(self, t):
        self.dispatch(t.value)

    def _Slice(self, t):
        if t.lower:
            self.dispatch(t.lower)
        self.write(":")
        if t.upper:
            self.dispatch(t.upper)
        if t.step:
            self.write(":")
            self.dispatch(t.step)

    def _ExtSlice(self, t):
        interleave(lambda: self.write(', '), self.dispatch, t.dims)

    # argument
    def _arg(self, t):
        self.write(t.arg)
        if t.annotation:
            self.write(": ")
            self.dispatch(t.annotation)

    # others
    def _arguments(self, t):
        first = True
        # normal arguments
        # NOTE(review): positional-only parameters (t.posonlyargs, 3.8+) are
        # not emitted -- confirm whether inputs can contain them.
        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
        for a, d in zip(t.args, defaults):
            if first:
                first = False
            else:
                self.write(", ")
            self.dispatch(a)
            if d:
                self.write("=")
                self.dispatch(d)

        # varargs, or bare '*' if no varargs but keyword-only arguments present
        if t.vararg or t.kwonlyargs:
            if first:
                first = False
            else:
                self.write(", ")
            self.write("*")
            if t.vararg:
                self.write(t.vararg.arg)
                if t.vararg.annotation:
                    self.write(": ")
                    self.dispatch(t.vararg.annotation)

        # keyword-only arguments
        if t.kwonlyargs:
            for a, d in zip(t.kwonlyargs, t.kw_defaults):
                if first:
                    first = False
                else:
                    self.write(", ")
                self.dispatch(a)  # (stray trailing comma removed: was a no-op tuple)
                if d:
                    self.write("=")
                    self.dispatch(d)

        # kwargs
        if t.kwarg:
            if first:
                first = False
            else:
                self.write(", ")
            self.write("**"+t.kwarg.arg)
            if t.kwarg.annotation:
                self.write(": ")
                self.dispatch(t.kwarg.annotation)

    def _keyword(self, t):
        if t.arg is None:
            # Python 3.5+ encodes `**expr` in a call as a keyword with arg=None.
            self.write("**")
            self.dispatch(t.value)
        else:
            self.write(t.arg)
            self.write("=")
            self.dispatch(t.value)

    def _Lambda(self, t):
        self.write("(")
        self.write("lambda ")
        self.dispatch(t.args)
        self.write(": ")
        self.dispatch(t.body)
        self.write(")")

    def _alias(self, t):
        self.write(t.name)
        if t.asname:
            self.write(" as "+t.asname)

    def _withitem(self, t):
        self.dispatch(t.context_expr)
        if t.optional_vars:
            self.write(" as ")
            self.dispatch(t.optional_vars)
+
def roundtrip(filename, output=sys.stdout):
    """Parse `filename` to an AST and unparse it onto `output`."""
    # First pass: sniff the declared source encoding from the raw bytes.
    with open(filename, "rb") as stream:
        encoding = tokenize.detect_encoding(stream.readline)[0]
    # Second pass: read the text with that encoding and parse to an AST.
    with open(filename, "r", encoding=encoding) as stream:
        tree = compile(stream.read(), filename, "exec", ast.PyCF_ONLY_AST)
    Unparser(tree, output)
+
+
+
def testdir(a):
    """Round-trip every *.py file under directory ``a``, recursively.

    Failures are reported but do not stop the scan.
    """
    try:
        candidates = [entry for entry in os.listdir(a) if entry.endswith('.py')]
    except OSError:
        print("Directory not readable: %s" % a, file=sys.stderr)
        return
    for entry in candidates:
        fullname = os.path.join(a, entry)
        if os.path.isdir(fullname):
            testdir(fullname)
        elif os.path.isfile(fullname):
            # Unparse into a throwaway buffer; only success matters here.
            output = io.StringIO()
            print('Testing %s' % fullname)
            try:
                roundtrip(fullname, output)
            except Exception as e:
                print('  Failed to compile, exception is %s' % repr(e))
+
def main(args):
    """Entry point: '--testdir DIR...' scans directories, else unparse files."""
    if args[0] == '--testdir':
        for directory in args[1:]:
            testdir(directory)
    else:
        for filename in args:
            roundtrip(filename)

if __name__=='__main__':
    main(sys.argv[1:])
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..2b14288
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,14 @@
+[pytest]
+# testpaths is new in 2.8
+minversion = 2.8
+
+testpaths = mypy/test
+
+python_files = test*.py
+
+# empty patterns for default python collector, to stick to our plugin's collector
+python_classes =
+python_functions =
+
+# always run in parallel (requires pytest-xdist, see test-requirements.txt)
+addopts = -nauto --cov-append --cov-report=
diff --git a/runtests.py b/runtests.py
new file mode 100755
index 0000000..2caa751
--- /dev/null
+++ b/runtests.py
@@ -0,0 +1,428 @@
#!/usr/bin/env python3
"""Mypy test runner."""

if False:
    # Imported for type-comment checking only; at script startup `typing`
    # may not be importable yet (see the bootstrap below).
    import typing

if True:
    # When this is run as a script, `typing` is not available yet.
    import sys
    from os.path import join, isdir

    def get_versions():  # type: () -> typing.List[str]
        # Candidate Python versions whose bundled lib-typing backport
        # directory should be put on sys.path, current version first.
        major = sys.version_info[0]
        minor = sys.version_info[1]
        if major == 2:
            return ['2.7']
        else:
            # generates list of python versions to use.
            # For Python2, this is only [2.7].
            # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0].
            return ['%d.%d' % (major, i) for i in range(minor, -1, -1)]

    # Prepend every existing lib-typing/<version> directory to sys.path.
    sys.path[0:0] = [v for v in [join('lib-typing', v) for v in get_versions()] if isdir(v)]
    # Now `typing` is available.
+
+from typing import Dict, List, Optional, Set, Iterable
+
+from mypy.waiter import Waiter, LazySubprocess
+from mypy import util
+from mypy.test.config import test_data_prefix
+from mypy.test.testpythoneval import python_eval_files, python_34_eval_files
+
+import itertools
+import os
+import re
+
+
+# Ideally, all tests would be `discover`able so that they can be driven
+# (and parallelized) by an external test driver.
+
class Driver:
    """Collects test tasks and dispatches them to a parallel Waiter.

    Task names are filtered through substring whitelist/blacklist checks
    before queueing; execution happens later via ``self.waiter.run()``.
    """

    def __init__(self, whitelist: List[str], blacklist: List[str],
            arglist: List[str], verbosity: int, parallel_limit: int,
            xfail: List[str], coverage: bool) -> None:
        self.whitelist = whitelist  # substrings a task name must contain
        self.blacklist = blacklist  # substrings that exclude a task
        self.arglist = arglist  # extra arguments forwarded to myunit tasks
        self.verbosity = verbosity
        self.waiter = Waiter(verbosity=verbosity, limit=parallel_limit, xfail=xfail)
        self.versions = get_versions()
        self.cwd = os.getcwd()
        self.mypy = os.path.join(self.cwd, 'scripts', 'mypy')
        # Private copy of the environment; mutated by prepend_path().
        self.env = dict(os.environ)
        self.coverage = coverage

    def prepend_path(self, name: str, paths: List[str]) -> None:
        """Prepend the existing directories in `paths` to env var `name`."""
        old_val = self.env.get(name)
        paths = [p for p in paths if isdir(p)]
        if not paths:
            return
        if old_val is not None:
            new_val = ':'.join(itertools.chain(paths, [old_val]))
        else:
            new_val = ':'.join(paths)
        self.env[name] = new_val

    def allow(self, name: str) -> bool:
        """Return True if task `name` passes the whitelist/blacklist filters."""
        if any(f in name for f in self.whitelist):
            if not any(f in name for f in self.blacklist):
                if self.verbosity >= 2:
                    print('SELECT   #%d %s' % (len(self.waiter.queue), name))
                return True
        if self.verbosity >= 3:
            print('OMIT     %s' % name)
        return False

    def add_mypy_cmd(self, name: str, mypy_args: List[str], cwd: Optional[str] = None) -> None:
        """Queue a mypy run with the given command-line arguments."""
        full_name = 'check %s' % name
        if not self.allow(full_name):
            return
        args = [sys.executable, self.mypy] + mypy_args
        args.append('--show-traceback')
        self.waiter.add(LazySubprocess(full_name, args, cwd=cwd, env=self.env))

    def add_mypy(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
        """Queue a mypy run over individual files."""
        self.add_mypy_cmd(name, list(args), cwd=cwd)

    def add_mypy_modules(self, name: str, modules: Iterable[str],
                         cwd: Optional[str] = None) -> None:
        """Queue a mypy run over modules (a '-m mod' pair per module)."""
        args = list(itertools.chain(*(['-m', mod] for mod in modules)))
        self.add_mypy_cmd(name, args, cwd=cwd)

    def add_mypy_package(self, name: str, packagename: str, *flags: str) -> None:
        """Queue a mypy run over a whole package (-p)."""
        self.add_mypy_cmd(name, ['-p', packagename] + list(flags))

    def add_mypy_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
        """Queue a mypy run type-checking a program given as text (-c)."""
        self.add_mypy_cmd(name, ['-c'] + list(args), cwd=cwd)

    def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) -> None:
        """Queue a pytest run, optionally collecting coverage."""
        full_name = 'pytest %s' % name
        if not self.allow(full_name):
            return
        if coverage and self.coverage:
            args = [sys.executable, '-m', 'pytest', '--cov=mypy'] + pytest_args
        else:
            args = [sys.executable, '-m', 'pytest'] + pytest_args

        self.waiter.add(LazySubprocess(full_name, args, env=self.env))

    def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
        """Queue execution of a Python script under the current interpreter."""
        name = 'run %s' % name
        if not self.allow(name):
            return
        largs = list(args)
        largs[0:0] = [sys.executable]
        env = self.env
        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))

    def add_python_mod(self, name: str, *args: str, cwd: Optional[str] = None,
                       coverage: bool = False) -> None:
        """Queue execution of a module (python -m), optionally under coverage."""
        name = 'run %s' % name
        if not self.allow(name):
            return
        largs = list(args)
        if coverage and self.coverage:
            largs[0:0] = ['coverage', 'run', '-m']
        else:
            largs[0:0] = [sys.executable, '-m']
        env = self.env
        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))

    def add_python_string(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
        """Queue execution of program text (python -c)."""
        name = 'run %s' % name
        if not self.allow(name):
            return
        largs = list(args)
        largs[0:0] = [sys.executable, '-c']
        env = self.env
        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))

    def add_python2(self, name: str, *args: str, cwd: Optional[str] = None) -> None:
        """Queue execution of a script under a discovered Python 2.7 interpreter."""
        name = 'run2 %s' % name
        if not self.allow(name):
            return
        largs = list(args)
        python2 = util.try_find_python2_interpreter()
        assert python2, "Couldn't find a Python 2.7 interpreter"
        largs[0:0] = [python2]
        env = self.env
        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))

    def add_flake8(self, cwd: Optional[str] = None) -> None:
        """Queue a flake8 lint run over the whole tree."""
        name = 'lint'
        if not self.allow(name):
            return
        largs = ['flake8', '-j{}'.format(self.waiter.limit)]
        env = self.env
        self.waiter.add(LazySubprocess(name, largs, cwd=cwd, env=env))

    def list_tasks(self) -> None:
        """Print the queued task names with their queue indices."""
        for id, task in enumerate(self.waiter.queue):
            print('{id}:{task}'.format(id=id, task=task.name))
+
+
def add_basic(driver: Driver) -> None:
    """Queue type checks of a few individual top-level files and scripts."""
    # The setup.py check is intentionally disabled.
    if False:
        driver.add_mypy('file setup.py', 'setup.py')
    checks = [
        ('file runtests.py', ('runtests.py',)),
        ('legacy entry script', ('scripts/mypy',)),
        ('legacy myunit script', ('scripts/myunit',)),
        # needs typed_ast installed:
        ('fast-parse', ('--fast-parse', 'test-data/samples/hello.py')),
    ]
    for name, args in checks:
        driver.add_mypy(name, *args)
+
+
def add_selftypecheck(driver: Driver) -> None:
    # Self-check the mypy package under two configurations (regular and
    # strict-optional).
    # NOTE(review): both tasks get the same name 'package mypy'; presumably
    # they should be distinguishable in output -- confirm whether duplicate
    # task names are intended.
    driver.add_mypy_package('package mypy', 'mypy', '--fast-parser',
                            '--config-file', 'mypy_self_check.ini')
    driver.add_mypy_package('package mypy', 'mypy', '--fast-parser',
                            '--config-file', 'mypy_strict_optional.ini')
+
+
+def find_files(base: str, prefix: str = '', suffix: str = '') -> List[str]:
+    return [join(root, f)
+            for root, dirs, files in os.walk(base)
+            for f in files
+            if f.startswith(prefix) and f.endswith(suffix)]
+
+
def file_to_module(file: str) -> str:
    """Convert a relative source path to its dotted module name."""
    module = os.path.splitext(file)[0].replace(os.sep, '.')
    # Package __init__ files map to the package itself.
    tail = '.__init__'
    if module.endswith(tail):
        module = module[:-len(tail)]
    return module
+
+
def add_imports(driver: Driver) -> None:
    # Make sure each module can be imported originally.
    # There is currently a bug in mypy where a module can pass typecheck
    # because of *implicit* imports from other modules.
    for path in find_files('mypy', suffix='.py'):
        module = file_to_module(path)
        if module.endswith('.__main__'):
            continue
        driver.add_python_string('import %s' % module, 'import %s' % module)
+
+
# Test modules already migrated from myunit to pytest; add_myunit() skips
# these and add_pytest() runs them instead.
PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [
    'testcheck', 'testextensions',
]]
+
+
def add_pytest(driver: Driver) -> None:
    """Queue the pytest-based test modules, with coverage requested."""
    for path in PYTEST_FILES:
        pytest_args = [path] + driver.arglist
        driver.add_pytest(path, pytest_args, True)
+
+
def add_myunit(driver: Driver) -> None:
    """Queue every myunit test module under mypy/ (one task per module)."""
    for path in find_files('mypy', prefix='test', suffix='.py'):
        mod = file_to_module(path)
        if mod in ('mypy.test.testpythoneval', 'mypy.test.testcmdline'):
            # Run Python evaluation integration tests and command-line
            # parsing tests separately since they are much slower than
            # proper unit tests.
            continue
        if path in PYTEST_FILES:
            # This module has been converted to pytest; don't try to use myunit.
            continue
        driver.add_python_mod('unit-test %s' % mod, 'mypy.myunit', '-m', mod,
                              *driver.arglist, coverage=True)
+
+
def add_pythoneval(driver: Driver) -> None:
    """Queue the slow Python-evaluation integration tests, grouped by the
    first character that follows 'test' in each case name."""
    case_re = re.compile(r'^\[case ([^\]]+)\]$')
    prefixes = set()
    for file in python_eval_files + python_34_eval_files:
        with open(os.path.join(test_data_prefix, file), 'r') as f:
            for line in f:
                match = case_re.match(line)
                if match is None:
                    continue
                case_name = match.group(1)
                assert case_name[:4] == 'test'
                prefixes.add(case_name[4:5])

    for prefix in sorted(prefixes):
        driver.add_python_mod(
            'eval-test-' + prefix,
            'mypy.myunit',
            '-m',
            'mypy.test.testpythoneval',
            'test_testpythoneval_PythonEvaluationSuite.test' + prefix + '*',
            *driver.arglist,
            coverage=True
        )
+
+
def add_cmdline(driver: Driver) -> None:
    """Queue the (slow) command-line parsing integration tests."""
    args = ['-m', 'mypy.test.testcmdline'] + driver.arglist
    driver.add_python_mod('cmdline-test', 'mypy.myunit', *args, coverage=True)
+
+
def add_stubs(driver: Driver) -> None:
    # We only test each module in the one version mypy prefers to find.
    # TODO: test stubs for other versions, especially Python 2 stubs.
    modules = {'typing'}  # type: Set[str]
    # TODO: This should also test Python 2, and pass pyversion accordingly.
    for version in ["2and3", "3", "3.3", "3.4", "3.5"]:
        for stub_type in ['builtins', 'stdlib', 'third_party']:
            stubdir = join('typeshed', stub_type, version)
            for stub in find_files(stubdir, suffix='.pyi'):
                modules.add(file_to_module(stub[len(stubdir) + 1:]))

    driver.add_mypy_modules('stubs', sorted(modules))
+
+
def add_stdlibsamples(driver: Driver) -> None:
    """Queue type checks of the vendored stdlib test samples, once per module."""
    seen = set()  # type: Set[str]
    for version in driver.versions:
        stdlibsamples_dir = join(driver.cwd, 'test-data', 'stdlib-samples', version)
        modules = []  # type: List[str]
        for path in find_files(stdlibsamples_dir, prefix='test_', suffix='.py'):
            module = file_to_module(path[len(stdlibsamples_dir) + 1:])
            if module in seen:
                continue
            seen.add(module)
            modules.append(module)
        if modules:
            driver.add_mypy_modules('stdlibsamples (%s)' % (version,), modules,
                                    cwd=stdlibsamples_dir)
+
+
def add_samples(driver: Driver) -> None:
    """Queue a fast-parser type check of each sample program."""
    samples_dir = os.path.join('test-data', 'samples')
    for sample in find_files(samples_dir, suffix='.py'):
        driver.add_mypy('file %s' % sample, sample, '--fast-parser')
+
+
+def usage(status: int) -> None:
+    print('Usage: %s [-h | -v | -q | [-x] FILTER | -a ARG] ... [-- FILTER ...]' % sys.argv[0])
+    print()
+    print('Run mypy tests. If given no arguments, run all tests.')
+    print()
+    print('Examples:')
+    print('  %s unit-test  (run unit tests only)' % sys.argv[0])
+    print('  %s unit-test -a "*tuple*"' % sys.argv[0])
+    print('       (run all unit tests with "tuple" in test name)')
+    print()
+    print('Options:')
+    print('  -h, --help             show this help')
+    print('  -v, --verbose          increase driver verbosity')
+    print('  -q, --quiet            decrease driver verbosity')
+    print('  -jN                    run N tasks at once (default: one per CPU)')
+    print('  -a, --argument ARG     pass an argument to myunit tasks')
+    print('                         (-v: verbose; glob pattern: filter by test name)')
+    print('  -l, --list             list included tasks (after filtering) and exit')
+    print('  FILTER                 include tasks matching FILTER')
+    print('  -x, --exclude FILTER   exclude tasks matching FILTER')
+    print('  -c, --coverage         calculate code coverage while running tests')
+    print('  --                     treat all remaining arguments as positional')
+    sys.exit(status)
+
+
+def sanity() -> None:
+    paths = os.getenv('PYTHONPATH')
+    if paths is None:
+        return
+    failed = False
+    for p in paths.split(os.pathsep):
+        if not os.path.isabs(p):
+            print('Relative PYTHONPATH entry %r' % p)
+            failed = True
+    if failed:
+        print('Please use absolute so that chdir() tests can work.')
+        print('Cowardly refusing to continue.')
+        sys.exit(1)
+
+
def main() -> None:
    """Parse command-line options, build the task list and run it."""
    sanity()

    verbosity = 0
    parallel_limit = 0
    whitelist = []  # type: List[str]
    blacklist = []  # type: List[str]
    arglist = []  # type: List[str]
    list_only = False
    coverage = False

    # `curlist` aliases the list the next positional argument is appended
    # to; -x and -a redirect it to blacklist/arglist for exactly one value.
    allow_opts = True
    curlist = whitelist
    for a in sys.argv[1:]:
        if curlist is not arglist and allow_opts and a.startswith('-'):
            if curlist is not whitelist:
                # An option immediately after -x/-a: stop parsing; the
                # identity checks below turn this into an error message.
                break
            if a == '--':
                allow_opts = False
            elif a == '-v' or a == '--verbose':
                verbosity += 1
            elif a == '-q' or a == '--quiet':
                verbosity -= 1
            elif a.startswith('-j'):
                try:
                    parallel_limit = int(a[2:])
                except ValueError:
                    usage(1)
            elif a == '-x' or a == '--exclude':
                curlist = blacklist
            elif a == '-a' or a == '--argument':
                curlist = arglist
            elif a == '-l' or a == '--list':
                list_only = True
            elif a == '-c' or a == '--coverage':
                coverage = True
            elif a == '-h' or a == '--help':
                usage(0)
            else:
                usage(1)
        else:
            curlist.append(a)
            curlist = whitelist
    if curlist is blacklist:
        sys.exit('-x must be followed by a filter')
    if curlist is arglist:
        sys.exit('-a must be followed by an argument')
    # empty string is a substring of all names
    if not whitelist:
        whitelist.append('')

    driver = Driver(whitelist=whitelist, blacklist=blacklist, arglist=arglist,
            verbosity=verbosity, parallel_limit=parallel_limit, xfail=[], coverage=coverage)

    # Make the in-tree scripts, stubs and lib-typing backports visible to
    # the spawned task subprocesses.
    driver.prepend_path('PATH', [join(driver.cwd, 'scripts')])
    driver.prepend_path('MYPYPATH', [driver.cwd])
    driver.prepend_path('PYTHONPATH', [driver.cwd])
    driver.prepend_path('PYTHONPATH', [join(driver.cwd, 'lib-typing', v) for v in driver.versions])

    # Queue every task group (each respects the whitelist/blacklist filters).
    add_pythoneval(driver)
    add_cmdline(driver)
    add_basic(driver)
    add_selftypecheck(driver)
    add_pytest(driver)
    add_myunit(driver)
    add_imports(driver)
    add_stubs(driver)
    add_stdlibsamples(driver)
    add_samples(driver)
    driver.add_flake8()

    if list_only:
        driver.list_tasks()
        return

    exit_code = driver.waiter.run()

    if verbosity >= 1:
        # Report cumulative runtimes per test category, slowest first.
        times = driver.waiter.times2 if verbosity >= 2 else driver.waiter.times1
        times_sortable = ((t, tp) for (tp, t) in times.items())
        for total_time, test_type in sorted(times_sortable, reverse=True):
            print('total time in %s: %f' % (test_type, total_time))

    sys.exit(exit_code)


if __name__ == '__main__':
    main()
diff --git a/setup.cfg b/setup.cfg
index 8962631..857ea67 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -7,7 +7,8 @@ exclude =
 	pinfer/*,
 	scripts/*,
 	test-data/*,
-	typeshed/*
+	typeshed/*,
+	tmp-test-dirs/*
 ignore = E251,E128,F401,W601,E701,W503,E704,E402,B3,B006,B007
 
 [coverage:run]
@@ -26,5 +27,4 @@ requires-dist =
 [egg_info]
 tag_build = 
 tag_date = 0
-tag_svn_revision = 0
 
diff --git a/test-data/.flake8 b/test-data/.flake8
new file mode 100644
index 0000000..a011f30
--- /dev/null
+++ b/test-data/.flake8
@@ -0,0 +1,21 @@
+# Some PEP8 deviations are considered irrelevant to stub files:
+# (error counts as of 2016-12-19)
+# 17381 E704 multiple statements on one line (def)
+# 11840 E301 expected 1 blank line
+#  7467 E302 expected 2 blank lines
+#  1772 E501 line too long
+#  1487 F401 imported but unused
+#  1248 E701 multiple statements on one line (colon)
+#   427 F811 redefinition
+#   356 E305 expected 2 blank lines
+
+# Nice-to-haves ignored for now
+#   152 E128 continuation line under-indented for visual indent
+#    43 E127 continuation line over-indented for visual indent
+
+[flake8]
+ignore = F401, F811, E127, E128, E301, E302, E305, E501, E701, E704, B303
+# We are checking with Python 3 but many of the stubs are Python 2 stubs.
+# A nice future improvement would be to provide separate .flake8
+# configurations for Python 2 and Python 3 files.
+builtins = StandardError,apply,basestring,buffer,cmp,coerce,execfile,file,intern,long,raw_input,reduce,reload,unichr,unicode,xrange
diff --git a/test-data/samples/bottles.py b/test-data/samples/bottles.py
new file mode 100644
index 0000000..ddf77f5
--- /dev/null
+++ b/test-data/samples/bottles.py
@@ -0,0 +1,13 @@
+import typing
+
REFRAIN = '''
%d bottles of beer on the wall,
%d bottles of beer,
take one down, pass it around,
%d bottles of beer on the wall!
'''
bottles_of_beer = 99
# Sing from 99 down to the last bottle; each verse names the current
# count and the count after one bottle is taken down.
while bottles_of_beer > 1:
    verse = REFRAIN % (bottles_of_beer, bottles_of_beer,
                       bottles_of_beer - 1)
    print(verse)
    bottles_of_beer -= 1
diff --git a/test-data/samples/class.py b/test-data/samples/class.py
new file mode 100644
index 0000000..d2eb4ac
--- /dev/null
+++ b/test-data/samples/class.py
@@ -0,0 +1,18 @@
+import typing
+
+
class BankAccount(object):
    """A minimal bank account holding a single integer balance."""

    def __init__(self, initial_balance: int = 0) -> None:
        # Balance may go negative; see overdrawn().
        self.balance = initial_balance

    def deposit(self, amount: int) -> None:
        """Add money to the account."""
        self.balance += amount

    def withdraw(self, amount: int) -> None:
        """Take money out of the account (no overdraft check)."""
        self.balance -= amount

    def overdrawn(self) -> bool:
        """Return True when the balance has gone negative."""
        return self.balance < 0


my_account = BankAccount(15)
my_account.withdraw(5)
print(my_account.balance)
diff --git a/test-data/samples/cmdline.py b/test-data/samples/cmdline.py
new file mode 100644
index 0000000..105c27a
--- /dev/null
+++ b/test-data/samples/cmdline.py
@@ -0,0 +1,8 @@
+# This program adds up integers in the command line
+import sys
+import typing
try:
    # Interpret every command-line argument as an integer and add them up.
    print('sum =', sum(int(arg) for arg in sys.argv[1:]))
except ValueError:
    print('Please supply integer arguments')
diff --git a/test-data/samples/crawl.py b/test-data/samples/crawl.py
new file mode 100644
index 0000000..56b6f8f
--- /dev/null
+++ b/test-data/samples/crawl.py
@@ -0,0 +1,863 @@
+#!/usr/bin/env python3.4
+
+"""A simple web crawler."""
+
+# This is cloned from <asyncio>/examples/crawl.py,
+# with type annotations added (PEP 484).
+#
+# TODO: convert to `async def` + `await` (PEP 492).
+
import argparse
import asyncio
import cgi
from http.client import BadStatusLine
import logging
import re
import sys
import time
import urllib.parse
from typing import Any, Dict, Generator, IO, List, Optional, Sequence, Set, Tuple
+
+
# Command-line interface.  Every crawler tuning knob is exposed as a
# flag; the positional 'roots' arguments are the URLs crawling starts from.
ARGS = argparse.ArgumentParser(description="Web crawler")
ARGS.add_argument(
    '--iocp', action='store_true', dest='iocp',
    default=False, help='Use IOCP event loop (Windows only)')
ARGS.add_argument(
    '--select', action='store_true', dest='select',
    default=False, help='Use Select event loop instead of default')
ARGS.add_argument(
    'roots', nargs='*',
    default=[], help='Root URL (may be repeated)')
ARGS.add_argument(
    '--max_redirect', action='store', type=int, metavar='N',
    default=10, help='Limit redirection chains (for 301, 302 etc.)')
ARGS.add_argument(
    '--max_tries', action='store', type=int, metavar='N',
    default=4, help='Limit retries on network errors')
ARGS.add_argument(
    '--max_tasks', action='store', type=int, metavar='N',
    default=100, help='Limit concurrent connections')
ARGS.add_argument(
    '--max_pool', action='store', type=int, metavar='N',
    default=100, help='Limit connection pool size')
ARGS.add_argument(
    '--exclude', action='store', metavar='REGEX',
    help='Exclude matching URLs')
ARGS.add_argument(
    '--strict', action='store_true',
    default=True, help='Strict host matching (default)')
ARGS.add_argument(
    '--lenient', action='store_false', dest='strict',
    default=False, help='Lenient host matching')
# -v and -q both write to 'level': repeated -v raises it, -q forces 0.
ARGS.add_argument(
    '-v', '--verbose', action='count', dest='level',
    default=1, help='Verbose logging (repeat for more verbose)')
ARGS.add_argument(
    '-q', '--quiet', action='store_const', const=0, dest='level',
    default=1, help='Quiet logging (opposite of --verbose)')
+
+
# HTML entity names and their replacement characters.  '&amp;' must be
# decoded last so that e.g. '&amp;lt;' collapses to '&lt;' rather than
# all the way down to '<'.
ESCAPES = [('quot', '"'),
           ('gt', '>'),
           ('lt', '<'),
           ('amp', '&')  # Must be last.
           ]


def unescape(url: str) -> str:
    """Turn '&amp;' into '&', and so on.

    This is the inverse of cgi.escape().
    """
    for name, char in ESCAPES:
        url = url.replace('&' + name + ';', char)
    return url
+
+
def fix_url(url: str) -> str:
    """Prefix a schema-less URL with http://."""
    # Anything that already carries a scheme is left untouched.
    if '://' in url:
        return url
    return 'http://' + url
+
+
class Logger:
    """Verbosity-filtered logger that writes to stderr."""

    def __init__(self, level: int) -> None:
        # Messages with importance n > level are suppressed.
        self.level = level

    def _emit(self, n: int, args: Sequence[Any]) -> None:
        # Only print messages that are important enough.
        if n <= self.level:
            print(*args, file=sys.stderr, flush=True)

    def log(self, n: int, *args: Any) -> None:
        self._emit(n, args)

    def __call__(self, n: int, *args: Any) -> None:
        # The instance itself is callable as shorthand for log().
        self._emit(n, args)
+
+
# (host, port, ssl) triple identifying a pooled connection endpoint.
KeyTuple = Tuple[str, int, bool]


class ConnectionPool:
    """A connection pool.

    To open a connection, use reserve().  To recycle it, use unreserve().

    The pool is mostly just a mapping from (host, port, ssl) tuples to
    lists of Connections.  The currently active connections are *not*
    in the data structure; get_connection() takes the connection out,
    and recycle_connection() puts it back in.  To recycle a
    connection, call conn.close(recycle=True).

    There are limits to both the overall pool and the per-key pool.
    """

    def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
        self.log = log
        self.max_pool = max_pool  # Overall limit.
        self.max_tasks = max_tasks  # Per-key limit.
        self.loop = asyncio.get_event_loop()
        # Idle connections, keyed by (host, port, ssl); self.queue holds
        # the same connections in recycle order (oldest first).
        self.connections = {}  # type: Dict[KeyTuple, List[Connection]]
        self.queue = []  # type: List[Connection]

    def close(self) -> None:
        """Close all connections available for reuse."""
        for conns in self.connections.values():
            for conn in conns:
                conn.close()
        self.connections.clear()
        self.queue.clear()

    @asyncio.coroutine
    def get_connection(self, host: str, port: int, ssl: bool) -> Generator[Any, None, 'Connection']:
        """Create or reuse a connection."""
        port = port or (443 if ssl else 80)
        try:
            ipaddrs = yield from self.loop.getaddrinfo(host, port)
        except Exception as exc:
            self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
            raise
        self.log(1, '* %s resolves to %s' %
                    (host, ', '.join(ip[4][0] for ip in ipaddrs)))

        # Look for a reusable connection under any of the resolved addresses.
        for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
            key = h, p, ssl
            conn = None
            conns = self.connections.get(key)
            while conns:
                conn = conns.pop(0)
                self.queue.remove(conn)
                if not conns:
                    del self.connections[key]
                if conn.stale():
                    self.log(1, 'closing stale connection for', key)
                    conn.close()  # Just in case.
                else:
                    self.log(1, '* Reusing pooled connection', key,
                                'FD =', conn.fileno())
                    return conn

        # Create a new connection.
        conn = Connection(self.log, self, host, port, ssl)
        yield from conn.connect()
        self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
        return conn

    def recycle_connection(self, conn: 'Connection') -> None:
        """Make a connection available for reuse.

        This also prunes the pool if it exceeds the size limits.
        """
        if conn.stale():
            conn.close()
            return

        key = conn.key
        conns = self.connections.setdefault(key, [])
        conns.append(conn)
        self.queue.append(conn)

        if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
            return

        # Prune the queue.

        # Close stale connections for this key first.
        stale = [conn for conn in conns if conn.stale()]
        if stale:
            for conn in stale:
                conns.remove(conn)
                self.queue.remove(conn)
                self.log(1, 'closing stale connection for', key)
                conn.close()
            if not conns:
                del self.connections[key]

        # Close oldest connection(s) for this key if limit reached.
        while len(conns) > self.max_tasks:
            conn = conns.pop(0)
            self.queue.remove(conn)
            self.log(1, 'closing oldest connection for', key)
            conn.close()

        if len(self.queue) <= self.max_pool:
            return

        # Close overall stale connections.
        stale = [conn for conn in self.queue if conn.stale()]
        if stale:
            for conn in stale:
                conns = self.connections.get(conn.key)
                conns.remove(conn)
                self.queue.remove(conn)
                # Bug fix: log this connection's own key; previously the
                # stale 'key' left over from the per-key section above
                # was logged here.
                self.log(1, 'closing stale connection for', conn.key)
                conn.close()

        # Close oldest overall connection(s) if limit reached.
        while len(self.queue) > self.max_pool:
            conn = self.queue.pop(0)
            conns = self.connections.get(conn.key)
            c = conns.pop(0)
            # The overall queue and the per-key list must agree on order.
            assert conn == c, (conn.key, conn, c, conns)
            self.log(1, 'closing overall oldest connection for', conn.key)
            conn.close()
+            conn.close()
+
+
class Connection:
    """A single client connection owned by a ConnectionPool.

    connect() opens the stream; close(recycle=True) hands the connection
    back to the pool instead of tearing it down.
    """

    def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
        self.log = log
        self.pool = pool
        self.host = host
        self.port = port
        self.ssl = ssl
        # reader/writer/key are populated by connect().
        self.reader = None  # type: asyncio.StreamReader
        self.writer = None  # type: asyncio.StreamWriter
        self.key = None  # type: KeyTuple

    def stale(self) -> bool:
        # Never connected, or the peer has closed its end of the stream.
        return self.reader is None or self.reader.at_eof()

    def fileno(self) -> Optional[int]:
        # Return the underlying socket's file descriptor, or None when
        # any link in the writer -> transport -> socket chain is missing.
        writer = self.writer
        if writer is not None:
            transport = writer.transport
            if transport is not None:
                sock = transport.get_extra_info('socket')
                if sock is not None:
                    return sock.fileno()
        return None

    @asyncio.coroutine
    def connect(self) -> Generator[Any, None, None]:
        self.reader, self.writer = yield from asyncio.open_connection(
            self.host, self.port, ssl=self.ssl)
        # Prefer the actual peer address over what the caller passed in,
        # so the pool key reflects the real endpoint.
        peername = self.writer.get_extra_info('peername')
        if peername:
            self.host, self.port = peername[:2]
        else:
            self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
        self.key = self.host, self.port, self.ssl

    def close(self, recycle: bool = False) -> None:
        # Recycling a stale connection would hand out a dead stream, so
        # only healthy connections go back to the pool.
        if recycle and not self.stale():
            self.pool.recycle_connection(self)
        else:
            self.writer.close()
            # Drop all references so this object cannot be reused.
            self.pool = self.reader = self.writer = None
+
+
class Request:
    """HTTP request.

    Use connect() to open a connection; send_request() to send the
    request; get_response() to receive the response headers.
    """

    def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
        self.log = log
        self.url = url
        self.pool = pool
        # Pre-compute everything needed to build the request line.
        self.parts = urllib.parse.urlparse(self.url)
        self.scheme = self.parts.scheme
        assert self.scheme in ('http', 'https'), repr(url)
        self.ssl = self.parts.scheme == 'https'
        self.netloc = self.parts.netloc
        self.hostname = self.parts.hostname
        self.port = self.parts.port or (443 if self.ssl else 80)
        self.path = (self.parts.path or '/')
        self.query = self.parts.query
        if self.query:
            self.full_path = '%s?%s' % (self.path, self.query)
        else:
            self.full_path = self.path
        self.http_version = 'HTTP/1.1'
        self.method = 'GET'
        self.headers = []  # type: List[Tuple[str, str]]
        self.conn = None  # type: Connection

    @asyncio.coroutine
    def connect(self) -> Generator[Any, None, None]:
        """Open a connection to the server."""
        self.log(1, '* Connecting to %s:%s using %s for %s' %
                    (self.hostname, self.port,
                     'ssl' if self.ssl else 'tcp',
                     self.url))
        self.conn = yield from self.pool.get_connection(self.hostname,
                                                        self.port, self.ssl)

    def close(self, recycle: bool = False) -> None:
        """Close the connection, recycle if requested."""
        if self.conn is not None:
            if not recycle:
                self.log(1, 'closing connection for', self.conn.key)
            self.conn.close(recycle)
            self.conn = None

    @asyncio.coroutine
    def putline(self, line: str) -> Generator[Any, None, None]:
        """Write a line to the connection.

        Used for the request line and headers.
        """
        self.log(2, '>', line)
        # HTTP/1.1 header text is latin-1 with CRLF line endings.
        self.conn.writer.write(line.encode('latin-1') + b'\r\n')

    @asyncio.coroutine
    def send_request(self) -> Generator[Any, None, None]:
        """Send the request."""
        request_line = '%s %s %s' % (self.method, self.full_path,
                                     self.http_version)
        yield from self.putline(request_line)
        # TODO: What if a header is already set?
        self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
        self.headers.append(('Host', self.netloc))
        self.headers.append(('Accept', '*/*'))
        # self.headers.append(('Accept-Encoding', 'gzip'))
        for key, value in self.headers:
            line = '%s: %s' % (key, value)
            yield from self.putline(line)
        # A blank line terminates the header section.
        yield from self.putline('')

    @asyncio.coroutine
    def get_response(self) -> Generator[Any, None, 'Response']:
        """Receive the response."""
        response = Response(self.log, self.conn.reader)
        yield from response.read_headers()
        return response
+
+
class Response:
    """HTTP response.

    Call read_headers() to receive the request headers.  Then check
    the status attribute and call get_header() to inspect the headers.
    Finally call read() to receive the body.
    """

    def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
        self.log = log
        self.reader = reader
        # All of these are filled in by read_headers().
        self.http_version = None  # type: str  # 'HTTP/1.1'
        self.status = None  # type: int  # 200
        self.reason = None  # type: str  # 'Ok'
        self.headers = []  # type: List[Tuple[str, str]]  # [('Content-Type', 'text/html')]

    @asyncio.coroutine
    def getline(self) -> Generator[Any, None, str]:
        """Read one line from the connection."""
        line = (yield from self.reader.readline()).decode('latin-1').rstrip()
        self.log(2, '<', line)
        return line

    @asyncio.coroutine
    def read_headers(self) -> Generator[Any, None, None]:
        """Read the response status and the request headers."""
        status_line = yield from self.getline()
        # Status line is 'HTTP-version SP status-code SP reason-phrase'.
        status_parts = status_line.split(None, 2)
        if len(status_parts) != 3:
            self.log(0, 'bad status_line', repr(status_line))
            raise BadStatusLine(status_line)
        self.http_version, status, self.reason = status_parts
        self.status = int(status)
        while True:
            header_line = yield from self.getline()
            if not header_line:
                # A blank line terminates the header section.
                break
            # TODO: Continuation lines.
            key, value = header_line.split(':', 1)
            self.headers.append((key, value.strip()))

    def get_redirect_url(self, default: str = '') -> str:
        """Inspect the status and return the redirect url if appropriate."""
        if self.status not in (300, 301, 302, 303, 307):
            return default
        return self.get_header('Location', default)

    def get_header(self, key: str, default: str = '') -> str:
        """Get one header value, using a case insensitive header name."""
        key = key.lower()
        for k, v in self.headers:
            if k.lower() == key:
                return v
        return default

    @asyncio.coroutine
    def read(self) -> Generator[Any, None, bytes]:
        """Read the response body.

        This honors Content-Length and Transfer-Encoding: chunked.
        """
        nbytes = None
        for key, value in self.headers:
            if key.lower() == 'content-length':
                nbytes = int(value)
                break
        if nbytes is None:
            if self.get_header('transfer-encoding').lower() == 'chunked':
                self.log(2, 'parsing chunked response')
                blocks = []
                while True:
                    size_header = yield from self.reader.readline()
                    if not size_header:
                        self.log(0, 'premature end of chunked response')
                        break
                    self.log(3, 'size_header =', repr(size_header))
                    # Chunk size is hex, optionally followed by ';extensions'.
                    parts = size_header.split(b';')
                    size = int(parts[0], 16)
                    if size:
                        self.log(3, 'reading chunk of', size, 'bytes')
                        block = yield from self.reader.readexactly(size)
                        assert len(block) == size, (len(block), size)
                        blocks.append(block)
                    # Every chunk (including the last) ends with CRLF.
                    crlf = yield from self.reader.readline()
                    assert crlf == b'\r\n', repr(crlf)
                    if not size:
                        # The zero-sized chunk marks the end of the body.
                        break
                body = b''.join(blocks)
                self.log(1, 'chunked response had', len(body),
                            'bytes in', len(blocks), 'blocks')
            else:
                self.log(3, 'reading until EOF')
                body = yield from self.reader.read()
                # TODO: Should make sure not to recycle the connection
                # in this case.
        else:
            body = yield from self.reader.readexactly(nbytes)
        return body
+
+
class Fetcher:
    """Logic and state for one URL.

    When found in crawler.busy, this represents a URL to be fetched or
    in the process of being fetched; when found in crawler.done, this
    holds the results from fetching it.

    This is usually associated with a task.  This references the
    crawler for the connection pool and to add more URLs to its todo
    list.

    Call fetch() to do the fetching, then report() to print the results.
    """

    def __init__(self, log: Logger, url: str, crawler: 'Crawler',
                 max_redirect: int = 10, max_tries: int = 4) -> None:
        self.log = log
        self.url = url
        self.crawler = crawler
        # We don't loop resolving redirects here -- we just use this
        # to decide whether to add the redirect URL to crawler.todo.
        self.max_redirect = max_redirect
        # But we do loop to retry on errors a few times.
        self.max_tries = max_tries
        # Everything we collect from the response goes here.
        self.task = None  # type: asyncio.Task
        self.exceptions = []  # type: List[Exception]
        self.tries = 0
        self.request = None  # type: Request
        self.response = None  # type: Response
        self.body = None  # type: bytes
        self.next_url = None  # type: str
        self.ctype = None  # type: str
        self.pdict = None  # type: Dict[str, str]
        self.encoding = None  # type: str
        self.urls = None  # type: Set[str]
        self.new_urls = None  # type: Set[str]

    @asyncio.coroutine
    def fetch(self) -> Generator[Any, None, None]:
        """Attempt to fetch the contents of the URL.

        If successful, and the data is HTML, extract further links and
        add them to the crawler.  Redirects are also added back there.
        """
        while self.tries < self.max_tries:
            self.tries += 1
            self.request = None
            try:
                self.request = Request(self.log, self.url, self.crawler.pool)
                yield from self.request.connect()
                yield from self.request.send_request()
                self.response = yield from self.request.get_response()
                self.body = yield from self.response.read()
                h_conn = self.response.get_header('connection').lower()
                if h_conn != 'close':
                    # Keep-alive: hand the connection back to the pool.
                    self.request.close(recycle=True)
                    self.request = None
                if self.tries > 1:
                    self.log(1, 'try', self.tries, 'for', self.url, 'success')
                break
            except (BadStatusLine, OSError) as exc:
                self.exceptions.append(exc)
                self.log(1, 'try', self.tries, 'for', self.url,
                            'raised', repr(exc))
                # import pdb; pdb.set_trace()
                # Don't reuse the connection in this case.
            finally:
                if self.request is not None:
                    self.request.close()
        else:
            # We never broke out of the while loop, i.e. all tries failed.
            self.log(0, 'no success for', self.url,
                        'in', self.max_tries, 'tries')
            return
        next_url = self.response.get_redirect_url()
        if next_url:
            self.next_url = urllib.parse.urljoin(self.url, next_url)
            if self.max_redirect > 0:
                self.log(1, 'redirect to', self.next_url, 'from', self.url)
                self.crawler.add_url(self.next_url, self.max_redirect - 1)
            else:
                self.log(0, 'redirect limit reached for', self.next_url,
                            'from', self.url)
        else:
            if self.response.status == 200:
                self.ctype = self.response.get_header('content-type')
                self.pdict = {}
                if self.ctype:
                    self.ctype, self.pdict = cgi.parse_header(self.ctype)
                self.encoding = self.pdict.get('charset', 'utf-8')
                if self.ctype == 'text/html':
                    body = self.body.decode(self.encoding, 'replace')
                    # Replace href with (?:href|src) to follow image links.
                    self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
                                               body))
                    if self.urls:
                        self.log(1, 'got', len(self.urls),
                                    'distinct urls from', self.url)
                    self.new_urls = set()
                    for url in self.urls:
                        url = unescape(url)
                        url = urllib.parse.urljoin(self.url, url)
                        # Drop fragments so equivalent URLs deduplicate.
                        url, frag = urllib.parse.urldefrag(url)
                        if self.crawler.add_url(url):
                            self.new_urls.add(url)

    def report(self, stats: 'Stats', file: IO[str] = None) -> None:
        """Print a report on the state for this URL.

        Also update the Stats instance.
        """
        if self.task is not None:
            if not self.task.done():
                stats.add('pending')
                print(self.url, 'pending', file=file)
                return
            elif self.task.cancelled():
                stats.add('cancelled')
                print(self.url, 'cancelled', file=file)
                return
            elif self.task.exception():
                stats.add('exception')
                exc = self.task.exception()
                stats.add('exception_' + exc.__class__.__name__)
                print(self.url, exc, file=file)
                return
        if len(self.exceptions) == self.tries:
            # Every try failed; report the most recent exception.
            stats.add('fail')
            exc = self.exceptions[-1]
            stats.add('fail_' + str(exc.__class__.__name__))
            print(self.url, 'error', exc, file=file)
        elif self.next_url:
            stats.add('redirect')
            print(self.url, self.response.status, 'redirect', self.next_url,
                  file=file)
        elif self.ctype == 'text/html':
            stats.add('html')
            size = len(self.body or b'')
            stats.add('html_bytes', size)
            if self.log.level:
                print(self.url, self.response.status,
                      self.ctype, self.encoding,
                      size,
                      '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
                      file=file)
        elif self.response is None:
            # Bug fix: direct this line to 'file' like every other branch
            # (it previously always went to stdout).
            print(self.url, 'no response object', file=file)
        else:
            size = len(self.body or b'')
            if self.response.status == 200:
                stats.add('other')
                stats.add('other_bytes', size)
            else:
                stats.add('error')
                stats.add('error_bytes', size)
                stats.add('status_%s' % self.response.status)
            print(self.url, self.response.status,
                  self.ctype, self.encoding,
                  size,
                  file=file)
+
+
class Stats:
    """Accumulate named integer counters."""

    def __init__(self) -> None:
        self.stats = {}  # type: Dict[str, int]

    def add(self, key: str, count: int = 1) -> None:
        """Increment the counter for key by count (missing keys start at 0)."""
        self.stats[key] = self.stats.get(key, 0) + count

    def report(self, file: IO[str] = None) -> None:
        """Print all counters, sorted by key, with right-aligned counts."""
        for key in sorted(self.stats):
            print('%10d' % self.stats[key], key, file=file)
+
+
+class Crawler:
+    """Crawl a set of URLs.
+
+    This manages three disjoint sets of URLs (todo, busy, done).  The
+    data structures actually store dicts -- the values in todo give
+    the redirect limit, while the values in busy and done are Fetcher
+    instances.
+    """
+    def __init__(self, log: Logger,
+                 roots: Set[str], exclude: str = None, strict: bool = True,  # What to crawl.
+                 max_redirect: int = 10, max_tries: int = 4,  # Per-url limits.
+                 max_tasks: int = 10, max_pool: int = 10,  # Global limits.
+                 ) -> None:
+        self.log = log
+        self.roots = roots
+        self.exclude = exclude
+        self.strict = strict
+        self.max_redirect = max_redirect
+        self.max_tries = max_tries
+        self.max_tasks = max_tasks
+        self.max_pool = max_pool
+        self.todo = {}  # type: Dict[str, int]
+        self.busy = {}  # type: Dict[str, Fetcher]
+        self.done = {}  # type: Dict[str, Fetcher]
+        self.pool = ConnectionPool(self.log, max_pool, max_tasks)
+        self.root_domains = set()  # type: Set[str]
+        for root in roots:
+            host = urllib.parse.urlparse(root).hostname
+            if not host:
+                continue
+            if re.match(r'\A[\d\.]*\Z', host):
+                self.root_domains.add(host)
+            else:
+                host = host.lower()
+                if self.strict:
+                    self.root_domains.add(host)
+                    if host.startswith('www.'):
+                        self.root_domains.add(host[4:])
+                    else:
+                        self.root_domains.add('www.' + host)
+                else:
+                    parts = host.split('.')
+                    if len(parts) > 2:
+                        host = '.'.join(parts[-2:])
+                    self.root_domains.add(host)
+        for root in roots:
+            self.add_url(root)
+        self.governor = asyncio.Semaphore(max_tasks)
+        self.termination = asyncio.Condition()
+        self.t0 = time.time()
+        self.t1 = None  # type: Optional[float]
+
+    def close(self) -> None:
+        """Close resources (currently only the pool)."""
+        self.pool.close()
+
+    def host_okay(self, host: str) -> bool:
+        """Check if a host should be crawled.
+
+        A literal match (after lowercasing) is always good.  For hosts
+        that don't look like IP addresses, some approximate matches
+        are okay depending on the strict flag.
+        """
+        host = host.lower()
+        if host in self.root_domains:
+            return True
+        if re.match(r'\A[\d\.]*\Z', host):
+            return False
+        if self.strict:
+            return self._host_okay_strictish(host)
+        else:
+            return self._host_okay_lenient(host)
+
+    def _host_okay_strictish(self, host: str) -> bool:
+        """Check if a host should be crawled, strict-ish version.
+
+        This checks for equality modulo an initial 'www.' component.
+         """
+        if host.startswith('www.'):
+            if host[4:] in self.root_domains:
+                return True
+        else:
+            if 'www.' + host in self.root_domains:
+                return True
+        return False
+
+    def _host_okay_lenient(self, host: str) -> bool:
+        """Check if a host should be crawled, lenient version.
+
+        This compares the last two components of the host.
+        """
+        parts = host.split('.')
+        if len(parts) > 2:
+            host = '.'.join(parts[-2:])
+        return host in self.root_domains
+
+    def add_url(self, url: str, max_redirect: int = None) -> bool:
+        """Add a URL to the todo list if not seen before."""
+        if self.exclude and re.search(self.exclude, url):
+            return False
+        parsed = urllib.parse.urlparse(url)
+        if parsed.scheme not in ('http', 'https'):
+            self.log(2, 'skipping non-http scheme in', url)
+            return False
+        host = parsed.hostname
+        if not self.host_okay(host):
+            self.log(2, 'skipping non-root host in', url)
+            return False
+        if max_redirect is None:
+            max_redirect = self.max_redirect
+        if url in self.todo or url in self.busy or url in self.done:
+            return False
+        self.log(1, 'adding', url, max_redirect)
+        self.todo[url] = max_redirect
+        return True
+
+    @asyncio.coroutine
+    def crawl(self) -> Generator[Any, None, None]:
+        """Run the crawler until all finished."""
+        # The termination condition's lock is held except while waiting,
+        # so todo/busy are only examined in a consistent state.
+        with (yield from self.termination):
+            while self.todo or self.busy:
+                if self.todo:
+                    url, max_redirect = self.todo.popitem()
+                    fetcher = Fetcher(self.log, url,
+                                      crawler=self,
+                                      max_redirect=max_redirect,
+                                      max_tries=self.max_tries,
+                                      )
+                    self.busy[url] = fetcher
+                    # Each URL is fetched in its own task; fetch() moves
+                    # the fetcher from busy to done and notifies us.
+                    fetcher.task = asyncio.Task(self.fetch(fetcher))
+                else:
+                    yield from self.termination.wait()
+        self.t1 = time.time()
+
+    @asyncio.coroutine
+    def fetch(self, fetcher: Fetcher) -> Generator[Any, None, None]:
+        """Call the Fetcher's fetch(), with a limit on concurrency.
+
+        Once this returns, move the fetcher from busy to done.
+        """
+        url = fetcher.url
+        # The governor semaphore caps how many fetches run at once.
+        with (yield from self.governor):
+            try:
+                yield from fetcher.fetch()  # Fetcher gonna fetch.
+            finally:
+                # Force GC of the task, so the error is logged.
+                fetcher.task = None
+        with (yield from self.termination):
+            self.done[url] = fetcher
+            del self.busy[url]
+            # Wake crawl() so it can notice when everything is done.
+            self.termination.notify()
+
+    def report(self, file: IO[str] = None) -> None:
+        """Print a report on all completed URLs.
+
+        Writes to sys.stdout when file is None.
+        """
+        if self.t1 is None:
+            self.t1 = time.time()
+        dt = self.t1 - self.t0
+        if dt and self.max_tasks:
+            speed = len(self.done) / dt / self.max_tasks
+        else:
+            speed = 0
+        stats = Stats()
+        print('*** Report ***', file=file)
+        try:
+            # Busy fetchers are included so an interrupted run still
+            # reports everything it touched.
+            show = []  # type: List[Tuple[str, Fetcher]]
+            show.extend(self.done.items())
+            show.extend(self.busy.items())
+            show.sort()
+            for url, fetcher in show:
+                fetcher.report(stats, file=file)
+        except KeyboardInterrupt:
+            print('\nInterrupted', file=file)
+        print('Finished', len(self.done),
+              'urls in %.3f secs' % dt,
+              '(max_tasks=%d)' % self.max_tasks,
+              '(%.3f urls/sec/task)' % speed,
+              file=file)
+        stats.report(file=file)
+        print('Todo:', len(self.todo), file=file)
+        print('Busy:', len(self.busy), file=file)
+        print('Done:', len(self.done), file=file)
+        print('Date:', time.ctime(), 'local time', file=file)
+
+
+def main() -> None:
+    """Main program.
+
+    Parse arguments, set up event loop, run crawler, print report.
+    """
+    args = ARGS.parse_args()
+    if not args.roots:
+        print('Use --help for command line help')
+        return
+
+    log = Logger(args.level)
+
+    if args.iocp:
+        # IOCP (ProactorEventLoop) is Windows-only; --iocp elsewhere
+        # is a usage error.
+        if sys.platform == 'win32':
+            from asyncio import ProactorEventLoop
+            loop = ProactorEventLoop()  # type: ignore
+            asyncio.set_event_loop(loop)
+        else:
+            assert False
+    elif args.select:
+        loop = asyncio.SelectorEventLoop()  # type: ignore
+        asyncio.set_event_loop(loop)
+    else:
+        loop = asyncio.get_event_loop()
+
+    roots = {fix_url(root) for root in args.roots}
+
+    crawler = Crawler(log,
+                      roots, exclude=args.exclude,
+                      strict=args.strict,
+                      max_redirect=args.max_redirect,
+                      max_tries=args.max_tries,
+                      max_tasks=args.max_tasks,
+                      max_pool=args.max_pool,
+                      )
+    try:
+        loop.run_until_complete(crawler.crawl())  # Crawler gonna crawl.
+    except KeyboardInterrupt:
+        sys.stderr.flush()
+        print('\nInterrupted\n')
+    finally:
+        # Always report and release resources, even after an interrupt.
+        crawler.report()
+        crawler.close()
+        loop.close()
+
+
+if __name__ == '__main__':
+    # Basic logging setup for the script entry point.
+    logging.basicConfig(level=logging.INFO)  # type: ignore
+    main()
diff --git a/test-data/samples/crawl2.py b/test-data/samples/crawl2.py
new file mode 100644
index 0000000..5eaad70
--- /dev/null
+++ b/test-data/samples/crawl2.py
@@ -0,0 +1,852 @@
+#!/usr/bin/env python3.4
+
+"""A simple web crawler."""
+
+# This is cloned from <asyncio>/examples/crawl.py,
+# with type annotations added (PEP 484).
+#
+# This version (crawl2.) has also been converted to use `async def` +
+# `await` (PEP 492).
+
+import argparse
+import asyncio
+import cgi
+from http.client import BadStatusLine
+import logging
+import re
+import sys
+import time
+import urllib.parse
+from typing import Any, Awaitable, IO, Optional, Sequence, Set, Tuple
+
+
+# Command-line interface.  All options have usable defaults; 'roots'
+# is the list of start URLs.  --strict/--lenient toggle the same
+# 'strict' destination; -v/-q adjust the same 'level' destination.
+ARGS = argparse.ArgumentParser(description="Web crawler")
+ARGS.add_argument(
+    '--iocp', action='store_true', dest='iocp',
+    default=False, help='Use IOCP event loop (Windows only)')
+ARGS.add_argument(
+    '--select', action='store_true', dest='select',
+    default=False, help='Use Select event loop instead of default')
+ARGS.add_argument(
+    'roots', nargs='*',
+    default=[], help='Root URL (may be repeated)')
+ARGS.add_argument(
+    '--max_redirect', action='store', type=int, metavar='N',
+    default=10, help='Limit redirection chains (for 301, 302 etc.)')
+ARGS.add_argument(
+    '--max_tries', action='store', type=int, metavar='N',
+    default=4, help='Limit retries on network errors')
+ARGS.add_argument(
+    '--max_tasks', action='store', type=int, metavar='N',
+    default=100, help='Limit concurrent connections')
+ARGS.add_argument(
+    '--max_pool', action='store', type=int, metavar='N',
+    default=100, help='Limit connection pool size')
+ARGS.add_argument(
+    '--exclude', action='store', metavar='REGEX',
+    help='Exclude matching URLs')
+ARGS.add_argument(
+    '--strict', action='store_true',
+    default=True, help='Strict host matching (default)')
+ARGS.add_argument(
+    '--lenient', action='store_false', dest='strict',
+    default=False, help='Lenient host matching')
+ARGS.add_argument(
+    '-v', '--verbose', action='count', dest='level',
+    default=1, help='Verbose logging (repeat for more verbose)')
+ARGS.add_argument(
+    '-q', '--quiet', action='store_const', const=0, dest='level',
+    default=1, help='Quiet logging (opposite of --verbose)')
+
+
+ESCAPES = [('quot', '"'),
+           ('gt', '>'),
+           ('lt', '<'),
+           ('amp', '&')  # Must be last.
+           ]
+
+
+def unescape(url: str) -> str:
+    """Turn & into &, and so on.
+
+    This is the inverse of cgi.escape().
+    """
+    for name, char in ESCAPES:
+        url = url.replace('&' + name + ';', char)
+    return url
+
+
+def fix_url(url: str) -> str:
+    """Prefix a schema-less URL with http://."""
+    if '://' not in url:
+        url = 'http://' + url
+    return url
+
+
+class Logger:
+
+    def __init__(self, level: int) -> None:
+        self.level = level
+
+    def _log(self, n: int, args: Sequence[Any]) -> None:
+        if self.level >= n:
+            print(*args, file=sys.stderr, flush=True)
+
+    def log(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+    def __call__(self, n: int, *args: Any) -> None:
+        self._log(n, args)
+
+
+KeyTuple = Tuple[str, int, bool]
+
+
+class ConnectionPool:
+    """A connection pool.
+
+    To open a connection, use reserve().  To recycle it, use unreserve().
+
+    The pool is mostly just a mapping from (host, port, ssl) tuples to
+    lists of Connections.  The currently active connections are *not*
+    in the data structure; get_connection() takes the connection out,
+    and recycle_connection() puts it back in.  To recycle a
+    connection, call conn.close(recycle=True).
+
+    There are limits to both the overall pool and the per-key pool.
+    """
+
+    def __init__(self, log: Logger, max_pool: int = 10, max_tasks: int = 5) -> None:
+        self.log = log
+        self.max_pool = max_pool  # Overall limit.
+        self.max_tasks = max_tasks  # Per-key limit.
+        self.loop = asyncio.get_event_loop()
+        self.connections = {}  # type: Dict[KeyTuple, List[Connection]]
+        self.queue = []  # type: List[Connection]
+
+    def close(self) -> None:
+        """Close all connections available for reuse."""
+        for conns in self.connections.values():
+            for conn in conns:
+                conn.close()
+        self.connections.clear()
+        self.queue.clear()
+
+    async def get_connection(self, host: str, port: int, ssl: bool) -> 'Connection':
+        """Create or reuse a connection."""
+        port = port or (443 if ssl else 80)
+        try:
+            ipaddrs = await self.loop.getaddrinfo(host, port)
+        except Exception as exc:
+            self.log(0, 'Exception %r for (%r, %r)' % (exc, host, port))
+            raise
+        self.log(1, '* %s resolves to %s' %
+                    (host, ', '.join(ip[4][0] for ip in ipaddrs)))
+
+        # Look for a reusable connection.
+        for _1, _2, _3, _4, (h, p, *_5) in ipaddrs:
+            key = h, p, ssl
+            conn = None
+            conns = self.connections.get(key)
+            while conns:
+                conn = conns.pop(0)
+                self.queue.remove(conn)
+                if not conns:
+                    del self.connections[key]
+                if conn.stale():
+                    self.log(1, 'closing stale connection for', key)
+                    conn.close()  # Just in case.
+                else:
+                    self.log(1, '* Reusing pooled connection', key,
+                                'FD =', conn.fileno())
+                    return conn
+
+        # Create a new connection.
+        conn = Connection(self.log, self, host, port, ssl)
+        await conn.connect()
+        self.log(1, '* New connection', conn.key, 'FD =', conn.fileno())
+        return conn
+
+    def recycle_connection(self, conn: 'Connection') -> None:
+        """Make a connection available for reuse.
+
+        This also prunes the pool if it exceeds the size limits.
+        """
+        if conn.stale():
+            conn.close()
+            return
+
+        key = conn.key
+        conns = self.connections.setdefault(key, [])
+        conns.append(conn)
+        self.queue.append(conn)
+
+        if len(conns) <= self.max_tasks and len(self.queue) <= self.max_pool:
+            return
+
+        # Prune the queue.
+
+        # Close stale connections for this key first.
+        stale = [conn for conn in conns if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+            if not conns:
+                del self.connections[key]
+
+        # Close oldest connection(s) for this key if limit reached.
+        while len(conns) > self.max_tasks:
+            conn = conns.pop(0)
+            self.queue.remove(conn)
+            self.log(1, 'closing oldest connection for', key)
+            conn.close()
+
+        if len(self.queue) <= self.max_pool:
+            return
+
+        # Close overall stale connections.
+        stale = [conn for conn in self.queue if conn.stale()]
+        if stale:
+            for conn in stale:
+                conns = self.connections.get(conn.key)
+                conns.remove(conn)
+                self.queue.remove(conn)
+                self.log(1, 'closing stale connection for', key)
+                conn.close()
+
+        # Close oldest overall connection(s) if limit reached.
+        while len(self.queue) > self.max_pool:
+            conn = self.queue.pop(0)
+            conns = self.connections.get(conn.key)
+            c = conns.pop(0)
+            assert conn == c, (conn.key, conn, c, conns)
+            self.log(1, 'closing overall oldest connection for', conn.key)
+            conn.close()
+
+
+class Connection:
+    """A single (optionally SSL) client connection owned by a pool.
+
+    reader/writer/key start as None and are only set by connect();
+    stale() therefore treats an unconnected instance as stale.
+    """
+
+    def __init__(self, log: Logger, pool: ConnectionPool, host: str, port: int, ssl: bool) -> None:
+        self.log = log
+        self.pool = pool
+        self.host = host
+        self.port = port
+        self.ssl = ssl
+        self.reader = None  # type: asyncio.StreamReader
+        self.writer = None  # type: asyncio.StreamWriter
+        self.key = None  # type: KeyTuple
+
+    def stale(self) -> bool:
+        # A connection is unusable once the peer has closed its side.
+        return self.reader is None or self.reader.at_eof()
+
+    def fileno(self) -> Optional[int]:
+        """Return the socket's file descriptor, or None if unavailable."""
+        writer = self.writer
+        if writer is not None:
+            transport = writer.transport
+            if transport is not None:
+                sock = transport.get_extra_info('socket')
+                if sock is not None:
+                    return sock.fileno()
+        return None
+
+    async def connect(self) -> None:
+        """Open the stream and record the (host, port, ssl) pool key."""
+        self.reader, self.writer = await asyncio.open_connection(
+            self.host, self.port, ssl=self.ssl)
+        peername = self.writer.get_extra_info('peername')
+        if peername:
+            # Prefer the actual peer address over the name we dialed.
+            self.host, self.port = peername[:2]
+        else:
+            self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
+        self.key = self.host, self.port, self.ssl
+
+    def close(self, recycle: bool = False) -> None:
+        """Close the connection, or hand it back to the pool if recycle."""
+        if recycle and not self.stale():
+            self.pool.recycle_connection(self)
+        else:
+            self.writer.close()
+            # Drop references so accidental reuse after close fails fast.
+            self.pool = self.reader = self.writer = None
+
+
+class Request:
+    """HTTP request.
+
+    Use connect() to open a connection; send_request() to send the
+    request; get_response() to receive the response headers.
+    """
+
+    def __init__(self, log: Logger, url: str, pool: ConnectionPool) -> None:
+        self.log = log
+        self.url = url
+        self.pool = pool
+        # Pre-compute the URL pieces needed by the request line and
+        # the Host header.
+        self.parts = urllib.parse.urlparse(self.url)
+        self.scheme = self.parts.scheme
+        assert self.scheme in ('http', 'https'), repr(url)
+        self.ssl = self.parts.scheme == 'https'
+        self.netloc = self.parts.netloc
+        self.hostname = self.parts.hostname
+        self.port = self.parts.port or (443 if self.ssl else 80)
+        self.path = (self.parts.path or '/')
+        self.query = self.parts.query
+        if self.query:
+            self.full_path = '%s?%s' % (self.path, self.query)
+        else:
+            self.full_path = self.path
+        self.http_version = 'HTTP/1.1'
+        self.method = 'GET'
+        self.headers = []  # type: List[Tuple[str, str]]
+        self.conn = None  # type: Connection
+
+    async def connect(self) -> None:
+        """Open a connection to the server."""
+        self.log(1, '* Connecting to %s:%s using %s for %s' %
+                    (self.hostname, self.port,
+                     'ssl' if self.ssl else 'tcp',
+                     self.url))
+        self.conn = await self.pool.get_connection(self.hostname,
+                                                        self.port, self.ssl)
+
+    def close(self, recycle: bool = False) -> None:
+        """Close the connection, recycle if requested."""
+        if self.conn is not None:
+            if not recycle:
+                self.log(1, 'closing connection for', self.conn.key)
+            self.conn.close(recycle)
+            self.conn = None
+
+    async def putline(self, line: str) -> None:
+        """Write a line to the connection.
+
+        Used for the request line and headers.
+        """
+        # NOTE(review): declared async but never awaits -- write() only
+        # buffers into the transport.
+        self.log(2, '>', line)
+        self.conn.writer.write(line.encode('latin-1') + b'\r\n')
+
+    async def send_request(self) -> None:
+        """Send the request line, the headers, and the blank terminator."""
+        request_line = '%s %s %s' % (self.method, self.full_path,
+                                     self.http_version)
+        await self.putline(request_line)
+        # TODO: What if a header is already set?
+        self.headers.append(('User-Agent', 'asyncio-example-crawl/0.0'))
+        self.headers.append(('Host', self.netloc))
+        self.headers.append(('Accept', '*/*'))
+        # self.headers.append(('Accept-Encoding', 'gzip'))
+        for key, value in self.headers:
+            line = '%s: %s' % (key, value)
+            await self.putline(line)
+        await self.putline('')
+
+    async def get_response(self) -> 'Response':
+        """Receive the response status line and headers."""
+        response = Response(self.log, self.conn.reader)
+        await response.read_headers()
+        return response
+
+
+class Response:
+    """HTTP response.
+
+    Call read_headers() to receive the request headers.  Then check
+    the status attribute and call get_header() to inspect the headers.
+    Finally call read() to receive the body.
+    """
+
+    def __init__(self, log: Logger, reader: asyncio.StreamReader) -> None:
+        self.log = log
+        self.reader = reader
+        # All of these are filled in by read_headers().
+        self.http_version = None  # type: str  # 'HTTP/1.1'
+        self.status = None  # type: int  # 200
+        self.reason = None  # type: str  # 'Ok'
+        self.headers = []  # type: List[Tuple[str, str]]  # [('Content-Type', 'text/html')]
+
+    async def getline(self) -> str:
+        """Read one line from the connection."""
+        # Headers are decoded as latin-1, which maps every byte.
+        line = (await self.reader.readline()).decode('latin-1').rstrip()
+        self.log(2, '<', line)
+        return line
+
+    async def read_headers(self) -> None:
+        """Read the response status and the request headers."""
+        status_line = await self.getline()
+        status_parts = status_line.split(None, 2)
+        if len(status_parts) != 3:
+            self.log(0, 'bad status_line', repr(status_line))
+            raise BadStatusLine(status_line)
+        self.http_version, status, self.reason = status_parts
+        self.status = int(status)
+        # Headers run until the first empty line.
+        while True:
+            header_line = await self.getline()
+            if not header_line:
+                break
+            # TODO: Continuation lines.
+            key, value = header_line.split(':', 1)
+            self.headers.append((key, value.strip()))
+
+    def get_redirect_url(self, default: str = '') -> str:
+        """Inspect the status and return the redirect url if appropriate."""
+        if self.status not in (300, 301, 302, 303, 307):
+            return default
+        return self.get_header('Location', default)
+
+    def get_header(self, key: str, default: str = '') -> str:
+        """Get one header value, using a case insensitive header name."""
+        key = key.lower()
+        for k, v in self.headers:
+            if k.lower() == key:
+                return v
+        return default
+
+    async def read(self) -> bytes:
+        """Read the response body.
+
+        This honors Content-Length and Transfer-Encoding: chunked.
+        """
+        nbytes = None
+        for key, value in self.headers:
+            if key.lower() == 'content-length':
+                nbytes = int(value)
+                break
+        if nbytes is None:
+            if self.get_header('transfer-encoding').lower() == 'chunked':
+                self.log(2, 'parsing chunked response')
+                blocks = []
+                while True:
+                    # Each chunk is preceded by '<hex size>[;ext]\r\n'.
+                    size_header = await self.reader.readline()
+                    if not size_header:
+                        self.log(0, 'premature end of chunked response')
+                        break
+                    self.log(3, 'size_header =', repr(size_header))
+                    parts = size_header.split(b';')
+                    size = int(parts[0], 16)
+                    if size:
+                        self.log(3, 'reading chunk of', size, 'bytes')
+                        block = await self.reader.readexactly(size)
+                        assert len(block) == size, (len(block), size)
+                        blocks.append(block)
+                    crlf = await self.reader.readline()
+                    assert crlf == b'\r\n', repr(crlf)
+                    if not size:
+                        # A zero-size chunk marks the end of the body.
+                        # NOTE(review): trailer headers, if present, are
+                        # not consumed here -- confirm peers don't send
+                        # them.
+                        break
+                body = b''.join(blocks)
+                self.log(1, 'chunked response had', len(body),
+                            'bytes in', len(blocks), 'blocks')
+            else:
+                self.log(3, 'reading until EOF')
+                body = await self.reader.read()
+                # TODO: Should make sure not to recycle the connection
+                # in this case.
+        else:
+            body = await self.reader.readexactly(nbytes)
+        return body
+
+
+class Fetcher:
+    """Logic and state for one URL.
+
+    When found in crawler.busy, this represents a URL to be fetched or
+    in the process of being fetched; when found in crawler.done, this
+    holds the results from fetching it.
+
+    This is usually associated with a task.  This references the
+    crawler for the connection pool and to add more URLs to its todo
+    list.
+
+    Call fetch() to do the fetching, then report() to print the results.
+    """
+
+    def __init__(self, log: Logger, url: str, crawler: 'Crawler',
+                 max_redirect: int = 10, max_tries: int = 4) -> None:
+        self.log = log
+        self.url = url
+        self.crawler = crawler
+        # We don't loop resolving redirects here -- we just use this
+        # to decide whether to add the redirect URL to crawler.todo.
+        self.max_redirect = max_redirect
+        # But we do loop to retry on errors a few times.
+        self.max_tries = max_tries
+        # Everything we collect from the response goes here.
+        self.task = None  # type: asyncio.Task
+        self.exceptions = []  # type: List[Exception]
+        self.tries = 0
+        self.request = None  # type: Request
+        self.response = None  # type: Response
+        self.body = None  # type: bytes
+        self.next_url = None  # type: str
+        self.ctype = None  # type: str
+        self.pdict = None  # type: Dict[str, str]
+        self.encoding = None  # type: str
+        self.urls = None  # type: Set[str]
+        self.new_urls = None  # type: Set[str]
+
+    async def fetch(self) -> None:
+        """Attempt to fetch the contents of the URL.
+
+        If successful, and the data is HTML, extract further links and
+        add them to the crawler.  Redirects are also added back there.
+        """
+        while self.tries < self.max_tries:
+            self.tries += 1
+            self.request = None
+            try:
+                self.request = Request(self.log, self.url, self.crawler.pool)
+                await self.request.connect()
+                await self.request.send_request()
+                self.response = await self.request.get_response()
+                self.body = await self.response.read()
+                h_conn = self.response.get_header('connection').lower()
+                if h_conn != 'close':
+                    self.request.close(recycle=True)
+                    self.request = None
+                if self.tries > 1:
+                    self.log(1, 'try', self.tries, 'for', self.url, 'success')
+                break
+            except (BadStatusLine, OSError) as exc:
+                self.exceptions.append(exc)
+                self.log(1, 'try', self.tries, 'for', self.url,
+                            'raised', repr(exc))
+                # import pdb; pdb.set_trace()
+                # Don't reuse the connection in this case.
+            finally:
+                if self.request is not None:
+                    self.request.close()
+        else:
+            # We never broke out of the while loop, i.e. all tries failed.
+            self.log(0, 'no success for', self.url,
+                        'in', self.max_tries, 'tries')
+            return
+        next_url = self.response.get_redirect_url()
+        if next_url:
+            self.next_url = urllib.parse.urljoin(self.url, next_url)
+            if self.max_redirect > 0:
+                self.log(1, 'redirect to', self.next_url, 'from', self.url)
+                self.crawler.add_url(self.next_url, self.max_redirect - 1)
+            else:
+                self.log(0, 'redirect limit reached for', self.next_url,
+                            'from', self.url)
+        else:
+            if self.response.status == 200:
+                self.ctype = self.response.get_header('content-type')
+                self.pdict = {}
+                if self.ctype:
+                    self.ctype, self.pdict = cgi.parse_header(self.ctype)
+                self.encoding = self.pdict.get('charset', 'utf-8')
+                if self.ctype == 'text/html':
+                    body = self.body.decode(self.encoding, 'replace')
+                    # Replace href with (?:href|src) to follow image links.
+                    self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
+                                               body))
+                    if self.urls:
+                        self.log(1, 'got', len(self.urls),
+                                    'distinct urls from', self.url)
+                    self.new_urls = set()
+                    for url in self.urls:
+                        url = unescape(url)
+                        url = urllib.parse.urljoin(self.url, url)
+                        url, frag = urllib.parse.urldefrag(url)
+                        if self.crawler.add_url(url):
+                            self.new_urls.add(url)
+
+    def report(self, stats: 'Stats', file: IO[str] = None) -> None:
+        """Print a report on the state for this URL.
+
+        Also update the Stats instance.
+        """
+        if self.task is not None:
+            if not self.task.done():
+                stats.add('pending')
+                print(self.url, 'pending', file=file)
+                return
+            elif self.task.cancelled():
+                stats.add('cancelled')
+                print(self.url, 'cancelled', file=file)
+                return
+            elif self.task.exception():
+                stats.add('exception')
+                exc = self.task.exception()
+                stats.add('exception_' + exc.__class__.__name__)
+                print(self.url, exc, file=file)
+                return
+        if len(self.exceptions) == self.tries:
+            stats.add('fail')
+            exc = self.exceptions[-1]
+            stats.add('fail_' + str(exc.__class__.__name__))
+            print(self.url, 'error', exc, file=file)
+        elif self.next_url:
+            stats.add('redirect')
+            print(self.url, self.response.status, 'redirect', self.next_url,
+                  file=file)
+        elif self.ctype == 'text/html':
+            stats.add('html')
+            size = len(self.body or b'')
+            stats.add('html_bytes', size)
+            if self.log.level:
+                print(self.url, self.response.status,
+                      self.ctype, self.encoding,
+                      size,
+                      '%d/%d' % (len(self.new_urls or ()), len(self.urls or ())),
+                      file=file)
+        elif self.response is None:
+            print(self.url, 'no response object')
+        else:
+            size = len(self.body or b'')
+            if self.response.status == 200:
+                stats.add('other')
+                stats.add('other_bytes', size)
+            else:
+                stats.add('error')
+                stats.add('error_bytes', size)
+                stats.add('status_%s' % self.response.status)
+            print(self.url, self.response.status,
+                  self.ctype, self.encoding,
+                  size,
+                  file=file)
+
+
+class Stats:
+    """Record stats of various sorts."""
+
+    def __init__(self) -> None:
+        self.stats = {}  # type: Dict[str, int]
+
+    def add(self, key: str, count: int = 1) -> None:
+        self.stats[key] = self.stats.get(key, 0) + count
+
+    def report(self, file: IO[str] = None) -> None:
+        for key, count in sorted(self.stats.items()):
+            print('%10d' % count, key, file=file)
+
+
+class Crawler:
+    """Crawl a set of URLs.
+
+    This manages three disjoint sets of URLs (todo, busy, done).  The
+    data structures actually store dicts -- the values in todo give
+    the redirect limit, while the values in busy and done are Fetcher
+    instances.
+
+    Run with crawl(); summarize the results with report().
+    """
+    def __init__(self, log: Logger,
+                 roots: Set[str], exclude: str = None, strict: bool = True,  # What to crawl.
+                 max_redirect: int = 10, max_tries: int = 4,  # Per-url limits.
+                 max_tasks: int = 10, max_pool: int = 10,  # Global limits.
+                 ) -> None:
+        self.log = log
+        self.roots = roots
+        self.exclude = exclude
+        self.strict = strict
+        self.max_redirect = max_redirect
+        self.max_tries = max_tries
+        self.max_tasks = max_tasks
+        self.max_pool = max_pool
+        self.todo = {}  # type: Dict[str, int]
+        self.busy = {}  # type: Dict[str, Fetcher]
+        self.done = {}  # type: Dict[str, Fetcher]
+        self.pool = ConnectionPool(self.log, max_pool, max_tasks)
+        # Precompute the domains that host_okay()/add_url() will accept.
+        self.root_domains = set()  # type: Set[str]
+        for root in roots:
+            host = urllib.parse.urlparse(root).hostname
+            if not host:
+                continue
+            if re.match(r'\A[\d\.]*\Z', host):
+                # Numeric (IP-like) hosts are matched literally only.
+                self.root_domains.add(host)
+            else:
+                host = host.lower()
+                if self.strict:
+                    # Strict mode accepts the host with and without 'www.'.
+                    self.root_domains.add(host)
+                    if host.startswith('www.'):
+                        self.root_domains.add(host[4:])
+                    else:
+                        self.root_domains.add('www.' + host)
+                else:
+                    # Lenient mode keys on the last two labels only.
+                    parts = host.split('.')
+                    if len(parts) > 2:
+                        host = '.'.join(parts[-2:])
+                    self.root_domains.add(host)
+        for root in roots:
+            self.add_url(root)
+        # governor caps concurrent fetches; termination signals idleness.
+        self.governor = asyncio.Semaphore(max_tasks)
+        self.termination = asyncio.Condition()
+        self.t0 = time.time()
+        self.t1 = None  # type: Optional[float]
+
+    def close(self) -> None:
+        """Close resources (currently only the pool).
+
+        Only idle pooled connections are affected (see
+        ConnectionPool.close).
+        """
+        self.pool.close()
+
+    def host_okay(self, host: str) -> bool:
+        """Check if a host should be crawled.
+
+        A literal match (after lowercasing) is always good.  For hosts
+        that don't look like IP addresses, some approximate matches
+        are okay depending on the strict flag.
+        """
+        host = host.lower()
+        if host in self.root_domains:
+            return True
+        if re.match(r'\A[\d\.]*\Z', host):
+            return False
+        if self.strict:
+            return self._host_okay_strictish(host)
+        else:
+            return self._host_okay_lenient(host)
+
+    def _host_okay_strictish(self, host: str) -> bool:
+        """Check if a host should be crawled, strict-ish version.
+
+        This checks for equality modulo an initial 'www.' component.
+         """
+        if host.startswith('www.'):
+            if host[4:] in self.root_domains:
+                return True
+        else:
+            if 'www.' + host in self.root_domains:
+                return True
+        return False
+
+    def _host_okay_lenient(self, host: str) -> bool:
+        """Check if a host should be crawled, lenient version.
+
+        This compares the last two components of the host.
+        """
+        parts = host.split('.')
+        if len(parts) > 2:
+            host = '.'.join(parts[-2:])
+        return host in self.root_domains
+
+    def add_url(self, url: str, max_redirect: int = None) -> bool:
+        """Add a URL to the todo list if not seen before."""
+        if self.exclude and re.search(self.exclude, url):
+            return False
+        parsed = urllib.parse.urlparse(url)
+        if parsed.scheme not in ('http', 'https'):
+            self.log(2, 'skipping non-http scheme in', url)
+            return False
+        host = parsed.hostname
+        if not self.host_okay(host):
+            self.log(2, 'skipping non-root host in', url)
+            return False
+        if max_redirect is None:
+            max_redirect = self.max_redirect
+        if url in self.todo or url in self.busy or url in self.done:
+            return False
+        self.log(1, 'adding', url, max_redirect)
+        self.todo[url] = max_redirect
+        return True
+
+    async def crawl(self) -> None:
+        """Run the crawler until all finished."""
+        with (await self.termination):
+            while self.todo or self.busy:
+                if self.todo:
+                    url, max_redirect = self.todo.popitem()
+                    fetcher = Fetcher(self.log, url,
+                                      crawler=self,
+                                      max_redirect=max_redirect,
+                                      max_tries=self.max_tries,
+                                      )
+                    self.busy[url] = fetcher
+                    fetcher.task = asyncio.Task(self.fetch(fetcher))
+                else:
+                    await self.termination.wait()
+        self.t1 = time.time()
+
+    async def fetch(self, fetcher: Fetcher) -> None:
+        """Call the Fetcher's fetch(), with a limit on concurrency.
+
+        Once this returns, move the fetcher from busy to done.
+        """
+        url = fetcher.url
+        with (await self.governor):
+            try:
+                await fetcher.fetch()  # Fetcher gonna fetch.
+            finally:
+                # Force GC of the task, so the error is logged.
+                fetcher.task = None
+        with (await self.termination):
+            self.done[url] = fetcher
+            del self.busy[url]
+            self.termination.notify()
+
+    def report(self, file: IO[str] = None) -> None:
+        """Print a report on all completed URLs."""
+        if self.t1 is None:
+            self.t1 = time.time()
+        dt = self.t1 - self.t0
+        if dt and self.max_tasks:
+            speed = len(self.done) / dt / self.max_tasks
+        else:
+            speed = 0
+        stats = Stats()
+        print('*** Report ***', file=file)
+        try:
+            show = []  # type: List[Tuple[str, Fetcher]]
+            show.extend(self.done.items())
+            show.extend(self.busy.items())
+            show.sort()
+            for url, fetcher in show:
+                fetcher.report(stats, file=file)
+        except KeyboardInterrupt:
+            print('\nInterrupted', file=file)
+        print('Finished', len(self.done),
+              'urls in %.3f secs' % dt,
+              '(max_tasks=%d)' % self.max_tasks,
+              '(%.3f urls/sec/task)' % speed,
+              file=file)
+        stats.report(file=file)
+        print('Todo:', len(self.todo), file=file)
+        print('Busy:', len(self.busy), file=file)
+        print('Done:', len(self.done), file=file)
+        print('Date:', time.ctime(), 'local time', file=file)
+
+
+def main() -> None:
+    """Main program.
+
+    Parse arguments, set up event loop, run crawler, print report.
+    """
+    args = ARGS.parse_args()
+    if not args.roots:
+        print('Use --help for command line help')
+        return
+
+    log = Logger(args.level)
+
+    if args.iocp:
+        if sys.platform == 'win32':
+            from asyncio import ProactorEventLoop
+            loop = ProactorEventLoop()  # type: ignore
+            asyncio.set_event_loop(loop)
+        else:
+            assert False
+    elif args.select:
+        loop = asyncio.SelectorEventLoop()  # type: ignore
+        asyncio.set_event_loop(loop)
+    else:
+        loop = asyncio.get_event_loop()
+
+    roots = {fix_url(root) for root in args.roots}
+
+    crawler = Crawler(log,
+                      roots, exclude=args.exclude,
+                      strict=args.strict,
+                      max_redirect=args.max_redirect,
+                      max_tries=args.max_tries,
+                      max_tasks=args.max_tasks,
+                      max_pool=args.max_pool,
+                      )
+    try:
+        loop.run_until_complete(crawler.crawl())  # Crawler gonna crawl.
+    except KeyboardInterrupt:
+        sys.stderr.flush()
+        print('\nInterrupted\n')
+    finally:
+        crawler.report()
+        crawler.close()
+        loop.close()
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO) # type: ignore
+    main()
diff --git a/test-data/samples/dict.py b/test-data/samples/dict.py
new file mode 100644
index 0000000..d74a5b5
--- /dev/null
+++ b/test-data/samples/dict.py
@@ -0,0 +1,8 @@
+import typing
+prices = {'apple': 0.40, 'banana': 0.50}
+my_purchase = {
+    'apple': 1,
+    'banana': 6}
+grocery_bill = sum(prices[fruit] * my_purchase[fruit]
+                   for fruit in my_purchase)
+print('I owe the grocer $%.2f' % grocery_bill)
diff --git a/test-data/samples/fib.py b/test-data/samples/fib.py
new file mode 100644
index 0000000..26248c8
--- /dev/null
+++ b/test-data/samples/fib.py
@@ -0,0 +1,5 @@
+import typing
+parents, babies = (1, 1)
+while babies < 100:
+    print('This generation has {0} babies'.format(babies))
+    parents, babies = (babies, parents + babies)
diff --git a/test-data/samples/files.py b/test-data/samples/files.py
new file mode 100644
index 0000000..f540c7c
--- /dev/null
+++ b/test-data/samples/files.py
@@ -0,0 +1,14 @@
+# indent your Python code to put into an email
+import glob
+import typing
+# glob supports Unix style pathname extensions
+python_files = glob.glob('*.py')
+for file_name in sorted(python_files):
+    print('    ------' + file_name)
+
+    f = open(file_name)
+    for line in f:
+        print('    ' + line.rstrip())
+    f.close()
+
+    print()
diff --git a/test-data/samples/for.py b/test-data/samples/for.py
new file mode 100644
index 0000000..f7eeed4
--- /dev/null
+++ b/test-data/samples/for.py
@@ -0,0 +1,4 @@
+import typing
+friends = ['john', 'pat', 'gary', 'michael']
+for i, name in enumerate(friends):
+    print("iteration {iteration} is {name}".format(iteration=i, name=name))
diff --git a/test-data/samples/generators.py b/test-data/samples/generators.py
new file mode 100644
index 0000000..9150c96
--- /dev/null
+++ b/test-data/samples/generators.py
@@ -0,0 +1,24 @@
+# Prime number sieve with generators
+
+import itertools
+from typing import Iterator
+
+
+def iter_primes() -> Iterator[int]:
+    # an iterator of all numbers between 2 and +infinity
+    numbers = itertools.count(2)
+
+    # generate primes forever
+    while True:
+        # get the first number from the iterator (always a prime)
+        prime = next(numbers)
+        yield prime
+
+        # this code iteratively builds up a chain of
+        # filters...slightly tricky, but ponder it a bit
+        numbers = filter(prime.__rmod__, numbers)
+
+for p in iter_primes():
+    if p > 1000:
+        break
+    print(p)
diff --git a/test-data/samples/greet.py b/test-data/samples/greet.py
new file mode 100644
index 0000000..47e7626
--- /dev/null
+++ b/test-data/samples/greet.py
@@ -0,0 +1,8 @@
+import typing
+
+
+def greet(name: str) -> None:
+    print('Hello', name)
+greet('Jack')
+greet('Jill')
+greet('Bob')
diff --git a/test-data/samples/guess.py b/test-data/samples/guess.py
new file mode 100644
index 0000000..d3f1cee
--- /dev/null
+++ b/test-data/samples/guess.py
@@ -0,0 +1,32 @@
+# "Guess the Number" Game (edited) from http://inventwithpython.com
+
+import random
+import typing
+
+guesses_made = 0
+
+name = input('Hello! What is your name?\n')
+
+number = random.randint(1, 20)
+print('Well, {0}, I am thinking of a number between 1 and 20.'.format(name))
+
+while guesses_made < 6:
+
+    guess = int(input('Take a guess: '))
+
+    guesses_made += 1
+
+    if guess < number:
+        print('Your guess is too low.')
+
+    if guess > number:
+        print('Your guess is too high.')
+
+    if guess == number:
+        break
+
+if guess == number:
+    print('Good job, {0}! You guessed my number in {1} guesses!'.format(
+          name, guesses_made))
+else:
+    print('Nope. The number I was thinking of was {0}'.format(number))
diff --git a/test-data/samples/hello.py b/test-data/samples/hello.py
new file mode 100644
index 0000000..6c0b2ca
--- /dev/null
+++ b/test-data/samples/hello.py
@@ -0,0 +1,2 @@
+import typing
+print('Hello, world')
diff --git a/test-data/samples/input.py b/test-data/samples/input.py
new file mode 100644
index 0000000..cca9233
--- /dev/null
+++ b/test-data/samples/input.py
@@ -0,0 +1,3 @@
+import typing
+name = input('What is your name?\n')
+print('Hi, %s.' % name)
diff --git a/test-data/samples/itertool.py b/test-data/samples/itertool.py
new file mode 100644
index 0000000..9ee2475
--- /dev/null
+++ b/test-data/samples/itertool.py
@@ -0,0 +1,16 @@
+from itertools import groupby
+import typing
+lines = '''
+This is the
+first paragraph.
+
+This is the second.
+'''.splitlines()
+# Use itertools.groupby and bool to return groups of
+# consecutive lines that either have content or don't.
+for has_chars, frags in groupby(lines, bool):
+    if has_chars:
+        print(' '.join(frags))
+# PRINTS:
+# This is the first paragraph.
+# This is the second.
diff --git a/test-data/samples/readme.txt b/test-data/samples/readme.txt
new file mode 100644
index 0000000..5889a8e
--- /dev/null
+++ b/test-data/samples/readme.txt
@@ -0,0 +1,25 @@
+Mypy Sample Programs
+--------------------
+
+The sample programs use static typing unless otherwise noted in comments.
+
+Original credits for sample programs:
+
+  fib.py - Python Wiki [1]
+  for.py - Python Wiki [1]
+  greet.py - Python Wiki [1]
+  hello.py - Python Wiki [1]
+  input.py - Python Wiki [1]
+  regexp.py - Python Wiki [1]
+  dict.py - Python Wiki [1]
+  cmdline.py - Python Wiki [1]
+  files.py - Python Wiki [1]
+  bottles.py - Python Wiki [1]
+  class.py - Python Wiki [1]
+  guess.py - Python Wiki [1]
+  generators.py - Python Wiki [1]
+  itertool.py - Python Wiki [1]
+
+The sample programs were ported to mypy by Jukka Lehtosalo.
+
+[1] http://wiki.python.org/moin/SimplePrograms
diff --git a/test-data/samples/regexp.py b/test-data/samples/regexp.py
new file mode 100644
index 0000000..6d8d799
--- /dev/null
+++ b/test-data/samples/regexp.py
@@ -0,0 +1,7 @@
+import typing
+import re
+for test_string in ['555-1212', 'ILL-EGAL']:
+    if re.match(r'^\d{3}-\d{4}$', test_string):
+        print(test_string, 'is a valid US local phone number')
+    else:
+        print(test_string, 'rejected')
diff --git a/test-data/stdlib-samples/3.2/base64.py b/test-data/stdlib-samples/3.2/base64.py
new file mode 100644
index 0000000..ef91964
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/base64.py
@@ -0,0 +1,411 @@
+#! /usr/bin/env python3
+
+"""RFC 3548: Base16, Base32, Base64 Data Encodings"""
+
+# Modified 04-Oct-1995 by Jack Jansen to use binascii module
+# Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support
+# Modified 22-May-2007 by Guido van Rossum to use bytes everywhere
+
+import re
+import struct
+import binascii
+
+from typing import Dict, List, AnyStr, IO
+
+
+__all__ = [
+    # Legacy interface exports traditional RFC 1521 Base64 encodings
+    'encode', 'decode', 'encodebytes', 'decodebytes',
+    # Generalized interface for other encodings
+    'b64encode', 'b64decode', 'b32encode', 'b32decode',
+    'b16encode', 'b16decode',
+    # Standard Base64 encoding
+    'standard_b64encode', 'standard_b64decode',
+    # Some common Base64 alternatives.  As referenced by RFC 3458, see thread
+    # starting at:
+    #
+    # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
+    'urlsafe_b64encode', 'urlsafe_b64decode',
+    ]
+
+
+bytes_types = (bytes, bytearray)  # Types acceptable as binary data
+
+
+def _translate(s: bytes, altchars: Dict[AnyStr, bytes]) -> bytes:
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    translation = bytearray(range(256))
+    for k, v in altchars.items():
+        translation[ord(k)] = v[0]
+    return s.translate(translation)
+
+
+
+# Base64 encoding/decoding uses binascii
+
+def b64encode(s: bytes, altchars: bytes = None) -> bytes:
+    """Encode a byte string using Base64.
+
+    s is the byte string to encode.  Optional altchars must be a byte
+    string of length 2 which specifies an alternative alphabet for the
+    '+' and '/' characters.  This allows an application to
+    e.g. generate url or filesystem safe Base64 strings.
+
+    The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    # Strip off the trailing newline
+    encoded = binascii.b2a_base64(s)[:-1]
+    if altchars is not None:
+        if not isinstance(altchars, bytes_types):
+            raise TypeError("expected bytes, not %s"
+                            % altchars.__class__.__name__)
+        assert len(altchars) == 2, repr(altchars)
+        return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]})
+    return encoded
+
+
+def b64decode(s: bytes, altchars: bytes = None,
+              validate: bool = False) -> bytes:
+    """Decode a Base64 encoded byte string.
+
+    s is the byte string to decode.  Optional altchars must be a
+    string of length 2 which specifies the alternative alphabet used
+    instead of the '+' and '/' characters.
+
+    The decoded string is returned.  A binascii.Error is raised if s is
+    incorrectly padded.
+
+    If validate is False (the default), non-base64-alphabet characters are
+    discarded prior to the padding check.  If validate is True,
+    non-base64-alphabet characters in the input result in a binascii.Error.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    if altchars is not None:
+        if not isinstance(altchars, bytes_types):
+            raise TypeError("expected bytes, not %s"
+                            % altchars.__class__.__name__)
+        assert len(altchars) == 2, repr(altchars)
+        s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'})
+    if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
+        raise binascii.Error('Non-base64 digit found')
+    return binascii.a2b_base64(s)
+
+
+def standard_b64encode(s: bytes) -> bytes:
+    """Encode a byte string using the standard Base64 alphabet.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    return b64encode(s)
+
+def standard_b64decode(s: bytes) -> bytes:
+    """Decode a byte string encoded with the standard Base64 alphabet.
+
+    s is the byte string to decode.  The decoded byte string is
+    returned.  binascii.Error is raised if the input is incorrectly
+    padded or if there are non-alphabet characters present in the
+    input.
+    """
+    return b64decode(s)
+
+def urlsafe_b64encode(s: bytes) -> bytes:
+    """Encode a byte string using a url-safe Base64 alphabet.
+
+    s is the byte string to encode.  The encoded byte string is
+    returned.  The alphabet uses '-' instead of '+' and '_' instead of
+    '/'.
+    """
+    return b64encode(s, b'-_')
+
+def urlsafe_b64decode(s: bytes) -> bytes:
+    """Decode a byte string encoded with the standard Base64 alphabet.
+
+    s is the byte string to decode.  The decoded byte string is
+    returned.  binascii.Error is raised if the input is incorrectly
+    padded or if there are non-alphabet characters present in the
+    input.
+
+    The alphabet uses '-' instead of '+' and '_' instead of '/'.
+    """
+    return b64decode(s, b'-_')
+
+
+
+# Base32 encoding/decoding must be done in Python
+_b32alphabet = {
+    0: b'A',  9: b'J', 18: b'S', 27: b'3',
+    1: b'B', 10: b'K', 19: b'T', 28: b'4',
+    2: b'C', 11: b'L', 20: b'U', 29: b'5',
+    3: b'D', 12: b'M', 21: b'V', 30: b'6',
+    4: b'E', 13: b'N', 22: b'W', 31: b'7',
+    5: b'F', 14: b'O', 23: b'X',
+    6: b'G', 15: b'P', 24: b'Y',
+    7: b'H', 16: b'Q', 25: b'Z',
+    8: b'I', 17: b'R', 26: b'2',
+    }
+
+_b32tab = [v[0] for k, v in sorted(_b32alphabet.items())]
+_b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()])
+
+
+def b32encode(s: bytes) -> bytes:
+    """Encode a byte string using Base32.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    quanta, leftover = divmod(len(s), 5)
+    # Pad the last quantum with zero bits if necessary
+    if leftover:
+        s = s + bytes(5 - leftover)  # Don't use += !
+        quanta += 1
+    encoded = bytes()
+    for i in range(quanta):
+        # c1 and c2 are 16 bits wide, c3 is 8 bits wide.  The intent of this
+        # code is to process the 40 bits in units of 5 bits.  So we take the 1
+        # leftover bit of c1 and tack it onto c2.  Then we take the 2 leftover
+        # bits of c2 and tack them onto c3.  The shifts and masks are intended
+        # to give us values of exactly 5 bits in width.
+        c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) # type: (int, int, int)
+        c2 += (c1 & 1) << 16 # 17 bits wide
+        c3 += (c2 & 3) << 8  # 10 bits wide
+        encoded += bytes([_b32tab[c1 >> 11],         # bits 1 - 5
+                          _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
+                          _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
+                          _b32tab[c2 >> 12],         # bits 16 - 20 (1 - 5)
+                          _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
+                          _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
+                          _b32tab[c3 >> 5],          # bits 31 - 35 (1 - 5)
+                          _b32tab[c3 & 0x1f],        # bits 36 - 40 (1 - 5)
+                          ])
+    # Adjust for any leftover partial quanta
+    if leftover == 1:
+        return encoded[:-6] + b'======'
+    elif leftover == 2:
+        return encoded[:-4] + b'===='
+    elif leftover == 3:
+        return encoded[:-3] + b'==='
+    elif leftover == 4:
+        return encoded[:-1] + b'='
+    return encoded
+
+
+def b32decode(s: bytes, casefold: bool = False, map01: bytes = None) -> bytes:
+    """Decode a Base32 encoded byte string.
+
+    s is the byte string to decode.  Optional casefold is a flag
+    specifying whether a lowercase alphabet is acceptable as input.
+    For security purposes, the default is False.
+
+    RFC 3548 allows for optional mapping of the digit 0 (zero) to the
+    letter O (oh), and for optional mapping of the digit 1 (one) to
+    either the letter I (eye) or letter L (el).  The optional argument
+    map01 when not None, specifies which letter the digit 1 should be
+    mapped to (when map01 is not None, the digit 0 is always mapped to
+    the letter O).  For security purposes the default is None, so that
+    0 and 1 are not allowed in the input.
+
+    The decoded byte string is returned.  binascii.Error is raised if
+    the input is incorrectly padded or if there are non-alphabet
+    characters present in the input.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    quanta, leftover = divmod(len(s), 8)
+    if leftover:
+        raise binascii.Error('Incorrect padding')
+    # Handle section 2.4 zero and one mapping.  The flag map01 will be either
+    # False, or the character to map the digit 1 (one) to.  It should be
+    # either L (el) or I (eye).
+    if map01 is not None:
+        if not isinstance(map01, bytes_types):
+            raise TypeError("expected bytes, not %s" % map01.__class__.__name__)
+        assert len(map01) == 1, repr(map01)
+        s = _translate(s, {b'0': b'O', b'1': map01})
+    if casefold:
+        s = s.upper()
+    # Strip off pad characters from the right.  We need to count the pad
+    # characters because this will tell us how many null bytes to remove from
+    # the end of the decoded string.
+    padchars = 0
+    mo = re.search(b'(?P<pad>[=]*)$', s)
+    if mo:
+        padchars = len(mo.group('pad'))
+        if padchars > 0:
+            s = s[:-padchars]
+    # Now decode the full quanta
+    parts = []  # type: List[bytes]
+    acc = 0
+    shift = 35
+    for c in s:
+        val = _b32rev.get(c)
+        if val is None:
+            raise TypeError('Non-base32 digit found')
+        acc += _b32rev[c] << shift
+        shift -= 5
+        if shift < 0:
+            parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii")))
+            acc = 0
+            shift = 35
+    # Process the last, partial quanta
+    last = binascii.unhexlify(bytes('%010x' % acc, "ascii"))
+    if padchars == 0:
+        last = b''                      # No characters
+    elif padchars == 1:
+        last = last[:-1]
+    elif padchars == 3:
+        last = last[:-2]
+    elif padchars == 4:
+        last = last[:-3]
+    elif padchars == 6:
+        last = last[:-4]
+    else:
+        raise binascii.Error('Incorrect padding')
+    parts.append(last)
+    return b''.join(parts)
+
+
+
+# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
+# lowercase.  The RFC also recommends against accepting input case
+# insensitively.
+def b16encode(s: bytes) -> bytes:
+    """Encode a byte string using Base16.
+
+    s is the byte string to encode.  The encoded byte string is returned.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    return binascii.hexlify(s).upper()
+
+
+def b16decode(s: bytes, casefold: bool = False) -> bytes:
+    """Decode a Base16 encoded byte string.
+
+    s is the byte string to decode.  Optional casefold is a flag
+    specifying whether a lowercase alphabet is acceptable as input.
+    For security purposes, the default is False.
+
+    The decoded byte string is returned.  binascii.Error is raised if
+    s were incorrectly padded or if there are non-alphabet characters
+    present in the string.
+    """
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    if casefold:
+        s = s.upper()
+    if re.search(b'[^0-9A-F]', s):
+        raise binascii.Error('Non-base16 digit found')
+    return binascii.unhexlify(s)
+
+
+
+# Legacy interface.  This code could be cleaned up since I don't believe
+# binascii has any line length limitations.  It just doesn't seem worth it
+# though.  The files should be opened in binary mode.
+
+MAXLINESIZE = 76 # Excluding the CRLF
+MAXBINSIZE = (MAXLINESIZE//4)*3
+
+def encode(input: IO[bytes], output: IO[bytes]) -> None:
+    """Encode a file; input and output are binary files."""
+    while True:
+        s = input.read(MAXBINSIZE)
+        if not s:
+            break
+        while len(s) < MAXBINSIZE:
+            ns = input.read(MAXBINSIZE-len(s))
+            if not ns:
+                break
+            s += ns
+        line = binascii.b2a_base64(s)
+        output.write(line)
+
+
+def decode(input: IO[bytes], output: IO[bytes]) -> None:
+    """Decode a file; input and output are binary files."""
+    while True:
+        line = input.readline()
+        if not line:
+            break
+        s = binascii.a2b_base64(line)
+        output.write(s)
+
+
+def encodebytes(s: bytes) -> bytes:
+    """Encode a bytestring into a bytestring containing multiple lines
+    of base-64 data."""
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    pieces = []  # type: List[bytes]
+    for i in range(0, len(s), MAXBINSIZE):
+        chunk = s[i : i + MAXBINSIZE]
+        pieces.append(binascii.b2a_base64(chunk))
+    return b"".join(pieces)
+
+def encodestring(s: bytes) -> bytes:
+    """Legacy alias of encodebytes()."""
+    import warnings
+    warnings.warn("encodestring() is a deprecated alias, use encodebytes()",
+                  DeprecationWarning, 2)
+    return encodebytes(s)
+
+
+def decodebytes(s: bytes) -> bytes:
+    """Decode a bytestring of base-64 data into a bytestring."""
+    if not isinstance(s, bytes_types):
+        raise TypeError("expected bytes, not %s" % s.__class__.__name__)
+    return binascii.a2b_base64(s)
+
+def decodestring(s: bytes) -> bytes:
+    """Legacy alias of decodebytes()."""
+    import warnings
+    warnings.warn("decodestring() is a deprecated alias, use decodebytes()",
+                  DeprecationWarning, 2)
+    return decodebytes(s)
+
+
+# Usable as a script...
+def main() -> None:
+    """Small main program"""
+    import sys, getopt
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'deut')
+    except getopt.error as msg:
+        sys.stdout = sys.stderr
+        print(msg)
+        print("""usage: %s [-d|-e|-u|-t] [file|-]
+        -d, -u: decode
+        -e: encode (default)
+        -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0])
+        sys.exit(2)
+    func = encode
+    for o, a in opts:
+        if o == '-e': func = encode
+        if o == '-d': func = decode
+        if o == '-u': func = decode
+        if o == '-t': test(); return
+    if args and args[0] != '-':
+        with open(args[0], 'rb') as f:
+            func(f, sys.stdout.buffer)
+    else:
+        func(sys.stdin.buffer, sys.stdout.buffer)
+
+
+def test() -> None:
+    s0 = b"Aladdin:open sesame"
+    print(repr(s0))
+    s1 = encodebytes(s0)
+    print(repr(s1))
+    s2 = decodebytes(s1)
+    print(repr(s2))
+    assert s0 == s2
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test-data/stdlib-samples/3.2/fnmatch.py b/test-data/stdlib-samples/3.2/fnmatch.py
new file mode 100644
index 0000000..ec27b90
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/fnmatch.py
@@ -0,0 +1,112 @@
+"""Filename matching with shell patterns.
+
+fnmatch(FILENAME, PATTERN) matches according to the local convention.
+fnmatchcase(FILENAME, PATTERN) always takes case in account.
+
+The functions operate by translating the pattern into a regular
+expression.  They cache the compiled regular expressions for speed.
+
+The function translate(PATTERN) returns a regular expression
+corresponding to PATTERN.  (It does not compile it.)
+"""
+import os
+import posixpath
+import re
+import functools
+
+from typing import Iterable, List, AnyStr, Any, Callable, Match
+
+__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
+
+def fnmatch(name: AnyStr, pat: AnyStr) -> bool:
+    """Test whether FILENAME matches PATTERN.
+
+    Patterns are Unix shell style:
+
+    *       matches everything
+    ?       matches any single character
+    [seq]   matches any character in seq
+    [!seq]  matches any char not in seq
+
+    An initial period in FILENAME is not special.
+    Both FILENAME and PATTERN are first case-normalized
+    if the operating system requires it.
+    If you don't want this, use fnmatchcase(FILENAME, PATTERN).
+    """
+    name = os.path.normcase(name)
+    pat = os.path.normcase(pat)
+    return fnmatchcase(name, pat)
+
+@functools.lru_cache(maxsize=250)
+def _compile_pattern(pat: AnyStr,
+                     is_bytes: bool = False) -> Callable[[AnyStr],
+                                                         Match[AnyStr]]:
+    if isinstance(pat, bytes):
+        pat_str = str(pat, 'ISO-8859-1')
+        res_str = translate(pat_str)
+        res = bytes(res_str, 'ISO-8859-1')
+    else:
+        res = translate(pat)
+    return re.compile(res).match
+
+def filter(names: Iterable[AnyStr], pat: AnyStr) -> List[AnyStr]:
+    """Return the subset of the list NAMES that match PAT."""
+    result = []  # type: List[AnyStr]
+    pat = os.path.normcase(pat)
+    match = _compile_pattern(pat, isinstance(pat, bytes))
+    if os.path is posixpath:
+        # normcase on posix is NOP. Optimize it away from the loop.
+        for name in names:
+            if match(name):
+                result.append(name)
+    else:
+        for name in names:
+            if match(os.path.normcase(name)):
+                result.append(name)
+    return result
+
+def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool:
+    """Test whether FILENAME matches PATTERN, including case.
+
+    This is a version of fnmatch() which doesn't case-normalize
+    its arguments.
+    """
+    match = _compile_pattern(pat, isinstance(pat, bytes))
+    return match(name) is not None
+
+def translate(pat: str) -> str:
+    """Translate a shell PATTERN to a regular expression.
+
+    There is no way to quote meta-characters.
+    """
+
+    i, n = 0, len(pat)
+    res = ''
+    while i < n:
+        c = pat[i]
+        i = i+1
+        if c == '*':
+            res = res + '.*'
+        elif c == '?':
+            res = res + '.'
+        elif c == '[':
+            j = i
+            if j < n and pat[j] == '!':
+                j = j+1
+            if j < n and pat[j] == ']':
+                j = j+1
+            while j < n and pat[j] != ']':
+                j = j+1
+            if j >= n:
+                res = res + '\\['
+            else:
+                stuff = pat[i:j].replace('\\','\\\\')
+                i = j+1
+                if stuff[0] == '!':
+                    stuff = '^' + stuff[1:]
+                elif stuff[0] == '^':
+                    stuff = '\\' + stuff
+                res = '%s[%s]' % (res, stuff)
+        else:
+            res = res + re.escape(c)
+    return res + '\Z(?ms)'
diff --git a/test-data/stdlib-samples/3.2/genericpath.py b/test-data/stdlib-samples/3.2/genericpath.py
new file mode 100644
index 0000000..bd1fddf
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/genericpath.py
@@ -0,0 +1,112 @@
+"""
+Path operations common to more than one OS
+Do not use directly.  The OS specific modules import the appropriate
+functions from this module themselves.
+"""
+import os
+import stat
+
+from typing import (
+    Any as Any_, List as List_, AnyStr as AnyStr_, Tuple as Tuple_
+)
+
+__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
+           'getsize', 'isdir', 'isfile']
+
+
+# Does a path exist?
+# This is false for dangling symbolic links on systems that support them.
+def exists(path: AnyStr_) -> bool:
+    """Test whether a path exists.  Returns False for broken symbolic links"""
+    try:
+        os.stat(path)
+    except os.error:
+        return False
+    return True
+
+
+# This follows symbolic links, so both islink() and isdir() can be true
+# for the same path on systems that support symlinks
+def isfile(path: AnyStr_) -> bool:
+    """Test whether a path is a regular file"""
+    try:
+        st = os.stat(path)
+    except os.error:
+        return False
+    return stat.S_ISREG(st.st_mode)
+
+
+# Is a path a directory?
+# This follows symbolic links, so both islink() and isdir()
+# can be true for the same path on systems that support symlinks
+def isdir(s: AnyStr_) -> bool:
+    """Return true if the pathname refers to an existing directory."""
+    try:
+        st = os.stat(s)
+    except os.error:
+        return False
+    return stat.S_ISDIR(st.st_mode)
+
+
+def getsize(filename: AnyStr_) -> int:
+    """Return the size of a file, reported by os.stat()."""
+    return os.stat(filename).st_size
+
+
+def getmtime(filename: AnyStr_) -> float:
+    """Return the last modification time of a file, reported by os.stat()."""
+    return os.stat(filename).st_mtime
+
+
+def getatime(filename: AnyStr_) -> float:
+    """Return the last access time of a file, reported by os.stat()."""
+    return os.stat(filename).st_atime
+
+
+def getctime(filename: AnyStr_) -> float:
+    """Return the metadata change time of a file, reported by os.stat()."""
+    return os.stat(filename).st_ctime
+
+
+# Return the longest prefix of all list elements.
+def commonprefix(m: List_[Any_]) -> Any_:
+    "Given a list of pathnames, returns the longest common leading component"
+    if not m: return ''
+    s1 = min(m)
+    s2 = max(m)
+    for i, c in enumerate(s1):
+        if c != s2[i]:
+            return s1[:i]
+    return s1
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
+# Generic implementation of splitext, to be parametrized with
+# the separators
+def _splitext(p: AnyStr_, sep: AnyStr_, altsep: AnyStr_,
+              extsep: AnyStr_) -> Tuple_[AnyStr_, AnyStr_]:
+    """Split the extension from a pathname.
+
+    Extension is everything from the last dot to the end, ignoring
+    leading dots.  Returns "(root, ext)"; ext may be empty."""
+    # NOTE: This code must work for text and bytes strings.
+
+    sepIndex = p.rfind(sep)
+    if altsep:
+        altsepIndex = p.rfind(altsep)
+        sepIndex = max(sepIndex, altsepIndex)
+
+    dotIndex = p.rfind(extsep)
+    if dotIndex > sepIndex:
+        # skip all leading dots
+        filenameIndex = sepIndex + 1
+        while filenameIndex < dotIndex:
+            if p[filenameIndex:filenameIndex+1] != extsep:
+                return p[:dotIndex], p[dotIndex:]
+            filenameIndex += 1
+
+    return p, p[:0]
diff --git a/test-data/stdlib-samples/3.2/getopt.py b/test-data/stdlib-samples/3.2/getopt.py
new file mode 100644
index 0000000..32f5bce
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/getopt.py
@@ -0,0 +1,220 @@
+"""Parser for command line options.
+
+This module helps scripts to parse the command line arguments in
+sys.argv.  It supports the same conventions as the Unix getopt()
+function (including the special meanings of arguments of the form `-'
+and `--').  Long options similar to those supported by GNU software
+may be used as well via an optional third argument.  This module
+provides two functions and an exception:
+
+getopt() -- Parse command line options
+gnu_getopt() -- Like getopt(), but allow option and non-option arguments
+to be intermixed.
+GetoptError -- exception (class) raised with 'opt' attribute, which is the
+option involved with the exception.
+"""
+
+# Long option support added by Lars Wirzenius <liw at iki.fi>.
+#
+# Gerrit Holl <gerrit at nl.linux.org> moved the string-based exceptions
+# to class-based exceptions.
+#
+# Peter Åstrand <astrand at lysator.liu.se> added gnu_getopt().
+#
+# TODO for gnu_getopt():
+#
+# - GNU getopt_long_only mechanism
+# - allow the caller to specify ordering
+# - RETURN_IN_ORDER option
+# - GNU extension with '-' as first character of option string
+# - optional arguments, specified by double colons
+# - a option string with a W followed by semicolon should
+#   treat "-W foo" as "--foo"
+
+__all__ = ["GetoptError","error","getopt","gnu_getopt"]
+
+import os
+
+from typing import List, Tuple, Iterable
+
+class GetoptError(Exception):
+    # Class-level defaults so both attributes exist even on instances
+    # created without going through __init__.
+    opt = ''
+    msg = ''
+    def __init__(self, msg: str, opt: str = '') -> None:
+        """Create an error with message *msg* for option *opt* (may be '')."""
+        self.msg = msg
+        self.opt = opt
+        Exception.__init__(self, msg, opt)
+
+    def __str__(self) -> str:
+        # Show only the message, not the (msg, opt) tuple.
+        return self.msg
+
+error = GetoptError # backward compatibility
+
+def getopt(args: List[str], shortopts: str,
+           longopts: Iterable[str]  =  []) -> Tuple[List[Tuple[str, str]],
+                                                    List[str]]:
+    """getopt(args, options[, long_options]) -> opts, args
+
+    Parses command line options and parameter list.  args is the
+    argument list to be parsed, without the leading reference to the
+    running program.  Typically, this means "sys.argv[1:]".  shortopts
+    is the string of option letters that the script wants to
+    recognize, with options that require an argument followed by a
+    colon (i.e., the same format that Unix getopt() uses).  If
+    specified, longopts is a list of strings with the names of the
+    long options which should be supported.  The leading '--'
+    characters should not be included in the option name.  Options
+    which require an argument should be followed by an equal sign
+    ('=').
+
+    The return value consists of two elements: the first is a list of
+    (option, value) pairs; the second is the list of program arguments
+    left after the option list was stripped (this is a trailing slice
+    of the first argument).  Each option-and-value pair returned has
+    the option as its first element, prefixed with a hyphen (e.g.,
+    '-x'), and the option argument as its second element, or an empty
+    string if the option has no argument.  The options occur in the
+    list in the same order in which they were found, thus allowing
+    multiple occurrences.  Long and short options may be mixed.
+
+    """
+
+    opts = []  # type: List[Tuple[str, str]]
+    if isinstance(longopts, str):
+        # Accept a single long option given as a bare string.
+        longopts = [longopts]
+    else:
+        # Copy so the caller's sequence (and the shared default) is
+        # never mutated.
+        longopts = list(longopts)
+    # Stop at the first non-option argument; a lone '-' counts as a
+    # plain argument too.
+    while args and args[0].startswith('-') and args[0] != '-':
+        if args[0] == '--':
+            # '--' explicitly terminates option processing.
+            args = args[1:]
+            break
+        if args[0].startswith('--'):
+            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
+        else:
+            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
+
+    return opts, args
+
+def gnu_getopt(args: List[str], shortopts: str,
+               longopts: Iterable[str]  =  []) -> Tuple[List[Tuple[str, str]],
+                                                        List[str]]:
+    """getopt(args, options[, long_options]) -> opts, args
+
+    This function works like getopt(), except that GNU style scanning
+    mode is used by default. This means that option and non-option
+    arguments may be intermixed. The getopt() function stops
+    processing options as soon as a non-option argument is
+    encountered.
+
+    If the first character of the option string is `+', or if the
+    environment variable POSIXLY_CORRECT is set, then option
+    processing stops as soon as a non-option argument is encountered.
+
+    """
+
+    opts = []  # type: List[Tuple[str, str]]
+    prog_args = []  # type: List[str]
+    if isinstance(longopts, str):
+        # Accept a single long option given as a bare string.
+        longopts = [longopts]
+    else:
+        # Copy so the caller's sequence (and the shared default) is
+        # never mutated.
+        longopts = list(longopts)
+
+    # Allow options after non-option arguments?
+    if shortopts.startswith('+'):
+        shortopts = shortopts[1:]
+        all_options_first = True
+    elif os.environ.get("POSIXLY_CORRECT"):
+        all_options_first = True
+    else:
+        all_options_first = False
+
+    while args:
+        if args[0] == '--':
+            # '--' ends option processing; the rest are plain arguments.
+            prog_args += args[1:]
+            break
+
+        if args[0][:2] == '--':
+            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
+        elif args[0][:1] == '-' and args[0] != '-':
+            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
+        else:
+            if all_options_first:
+                # POSIX mode: the first non-option stops option scanning.
+                prog_args += args
+                break
+            else:
+                # GNU mode: record the argument and keep scanning for
+                # further options.
+                prog_args.append(args[0])
+                args = args[1:]
+
+    return opts, prog_args
+
+def do_longs(opts: List[Tuple[str, str]], opt: str,
+             longopts: List[str],
+             args: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
+    """Consume one long option token (given without its leading '--').
+
+    Appends the ('--name', value) pair to *opts* and returns the updated
+    (opts, remaining_args) pair; at most one element of *args* is consumed
+    as the option's argument.
+    """
+    try:
+        i = opt.index('=')
+    except ValueError:
+        # No inline '=value'; the argument (if required) comes from args.
+        # NOTE(review): annotated as str but legitimately holds None here
+        # until filled in below (implicit-Optional style of early mypy).
+        optarg = None # type: str
+    else:
+        opt, optarg = opt[:i], opt[i+1:]
+
+    has_arg, opt = long_has_args(opt, longopts)
+    if has_arg:
+        if optarg is None:
+            if not args:
+                raise GetoptError('option --%s requires argument' % opt, opt)
+            optarg, args = args[0], args[1:]
+    elif optarg is not None:
+        raise GetoptError('option --%s must not have an argument' % opt, opt)
+    # 'optarg or ""' maps None (no argument taken) to the empty string.
+    opts.append(('--' + opt, optarg or ''))
+    return opts, args
+
+# Return:
+#   has_arg?
+#   full option name
+def long_has_args(opt: str, longopts: List[str]) -> Tuple[bool, str]:
+    """Resolve *opt* (possibly abbreviated) against *longopts*.
+
+    Returns (takes_argument, full_name).  Raises GetoptError when the
+    option is unknown or an ambiguous prefix.
+    """
+    # Prefix matching: GNU-style abbreviation of long options.
+    possibilities = [o for o in longopts if o.startswith(opt)]
+    if not possibilities:
+        raise GetoptError('option --%s not recognized' % opt, opt)
+    # Is there an exact match?
+    if opt in possibilities:
+        return False, opt
+    elif opt + '=' in possibilities:
+        # A trailing '=' in the declaration means "takes an argument".
+        return True, opt
+    # No exact match, so better be unique.
+    if len(possibilities) > 1:
+        # XXX since possibilities contains all valid continuations, might be
+        # nice to work them into the error msg
+        raise GetoptError('option --%s not a unique prefix' % opt, opt)
+    assert len(possibilities) == 1
+    unique_match = possibilities[0]
+    has_arg = unique_match.endswith('=')
+    if has_arg:
+        # Strip the '=' marker to recover the option's real name.
+        unique_match = unique_match[:-1]
+    return has_arg, unique_match
+
+def do_shorts(opts: List[Tuple[str, str]], optstring: str,
+              shortopts: str, args: List[str]) -> Tuple[List[Tuple[str, str]],
+                                                        List[str]]:
+    """Consume one cluster of short options (given without the leading '-').
+
+    Appends a ('-x', value) pair per letter to *opts*; the remainder of the
+    cluster, or the next element of *args*, supplies an argument when the
+    option requires one.  Returns the updated (opts, remaining_args).
+    """
+    while optstring != '':
+        # Peel off one option letter at a time.
+        opt, optstring = optstring[0], optstring[1:]
+        if short_has_arg(opt, shortopts):
+            if optstring == '':
+                # Argument is not attached (e.g. '-a foo'): take the
+                # next command-line element.
+                if not args:
+                    raise GetoptError('option -%s requires argument' % opt,
+                                      opt)
+                optstring, args = args[0], args[1:]
+            # The rest of the cluster is the option's argument.
+            optarg, optstring = optstring, ''
+        else:
+            optarg = ''
+        opts.append(('-' + opt, optarg))
+    return opts, args
+
+def short_has_arg(opt: str, shortopts: str) -> bool:
+    """Return True if short option *opt* requires an argument.
+
+    An option takes an argument when its letter is followed by ':' in
+    *shortopts* (the classic Unix getopt() convention).  Raises
+    GetoptError when the letter does not appear in *shortopts* at all.
+    """
+    for i in range(len(shortopts)):
+        # Chained comparison: match the option letter, but never treat
+        # a ':' marker itself as an option.
+        if opt == shortopts[i] != ':':
+            return shortopts.startswith(':', i+1)
+    raise GetoptError('option -%s not recognized' % opt, opt)
+
+if __name__ == '__main__':
+    import sys
+    # Manual smoke test: parse this script's own command line with a
+    # small sample option spec.
+    print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))
diff --git a/test-data/stdlib-samples/3.2/glob.py b/test-data/stdlib-samples/3.2/glob.py
new file mode 100644
index 0000000..0f3d5f5
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/glob.py
@@ -0,0 +1,84 @@
+"""Filename globbing utility."""
+
+import os
+import re
+import fnmatch
+
+from typing import List, Iterator, Iterable, Any, AnyStr
+
+__all__ = ["glob", "iglob"]
+
+def glob(pathname: AnyStr) -> List[AnyStr]:
+    """Return a list of paths matching a pathname pattern.
+
+    The pattern may contain simple shell-style wildcards a la fnmatch.
+
+    """
+    # Thin eager wrapper around the lazy iglob() generator.
+    return list(iglob(pathname))
+
+def iglob(pathname: AnyStr) -> Iterator[AnyStr]:
+    """Return an iterator which yields the paths matching a pathname pattern.
+
+    The pattern may contain simple shell-style wildcards a la fnmatch.
+
+    """
+    if not has_magic(pathname):
+        # Literal path: yield it only if it exists (lexists keeps
+        # broken symlinks visible).
+        if os.path.lexists(pathname):
+            yield pathname
+        return
+    dirname, basename = os.path.split(pathname)
+    if not dirname:
+        # Pattern has no directory part: match in the current directory.
+        for name in glob1(None, basename):
+            yield name
+        return
+    if has_magic(dirname):
+        # Recurse to expand wildcards in the directory part first.
+        dirs = iglob(dirname) # type: Iterable[AnyStr]
+    else:
+        dirs = [dirname]
+    if has_magic(basename):
+        glob_in_dir = glob1 # type: Any
+    else:
+        glob_in_dir = glob0
+    for dirname in dirs:
+        for name in glob_in_dir(dirname, basename):
+            yield os.path.join(dirname, name)
+
+# These 2 helper functions non-recursively glob inside a literal directory.
+# They return a list of basenames. `glob1` accepts a pattern while `glob0`
+# takes a literal basename (so it only has to check for its existence).
+
+def glob1(dirname: AnyStr, pattern: AnyStr) -> List[AnyStr]:
+    if not dirname:
+        # Default to the current directory, in the same type (str/bytes)
+        # as the pattern.
+        if isinstance(pattern, bytes):
+            dirname = bytes(os.curdir, 'ASCII')
+        else:
+            dirname = os.curdir
+    try:
+        names = os.listdir(dirname)
+    except os.error:
+        # Unreadable or missing directory: no matches.
+        return []
+    # Hide dotfiles unless the pattern itself starts with a dot.
+    # NOTE(review): for a bytes pattern, pattern[0] is an int and is never
+    # equal to the str '.', so this filter appears inert for bytes input --
+    # confirm against the upstream 3.2 glob module.
+    if pattern[0] != '.':
+        names = [x for x in names if x[0] != '.']
+    return fnmatch.filter(names, pattern)
+
+def glob0(dirname: AnyStr, basename: AnyStr) -> List[AnyStr]:
+    """Return [basename] if it exists inside dirname, else []."""
+    if basename == '':
+        # `os.path.split()` returns an empty basename for paths ending with a
+        # directory separator.  'q*x/' should match only directories.
+        if os.path.isdir(dirname):
+            return [basename]
+    else:
+        # lexists() so broken symlinks still count as matches.
+        if os.path.lexists(os.path.join(dirname, basename)):
+            return [basename]
+    return []
+
+
+magic_check = re.compile('[*?[]')
+magic_check_bytes = re.compile(b'[*?[]')
+
+def has_magic(s: AnyStr) -> bool:
+    """Return True if *s* contains any glob wildcard ('*', '?' or '[')."""
+    # Pick the pattern matching the input's type (str vs bytes).
+    if isinstance(s, bytes):
+        match = magic_check_bytes.search(s)
+    else:
+        match = magic_check.search(s)
+    return match is not None
diff --git a/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py b/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
new file mode 100644
index 0000000..aa861eb
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/incomplete/logging/__init__.py
@@ -0,0 +1,1873 @@
+# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Logging package for Python. Based on PEP 282 and comments thereto in
+comp.lang.python, and influenced by Apache's log4j system.
+
+Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
+
+To use, simply 'import logging' and log away!
+"""
+
+import sys, os, time, io, traceback, warnings, weakref
+from string import Template
+
+__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
+           'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
+           'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
+           'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
+           'captureWarnings', 'critical', 'debug', 'disable', 'error',
+           'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
+           'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning',
+           'getLogRecordFactory', 'setLogRecordFactory', 'lastResort']
+
+import codecs
+
+import _thread as thread
+import threading
+
+__author__  = "Vinay Sajip <vinay_sajip at red-dove.com>"
+__status__  = "production"
+__version__ = "0.5.1.2"
+__date__    = "07 February 2010"
+
+#---------------------------------------------------------------------------
+#   Miscellaneous module data
+#---------------------------------------------------------------------------
+
+#
+# _srcfile is used when walking the stack to check when we've got the first
+# caller stack frame.
+#
+if hasattr(sys, 'frozen'): #support for py2exe
+    _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
+else:
+    _srcfile = __file__
+_srcfile = os.path.normcase(_srcfile)
+
+# next bit filched from 1.5.2's inspect.py
+def _currentframe():
+    """Return the frame object for the caller's stack frame."""
+    # Portable fallback when sys._getframe() is unavailable: raising an
+    # exception exposes the current frame via the traceback; f_back is
+    # then the caller's frame.  The bare except only ever catches the
+    # Exception raised on the previous line.
+    try:
+        raise Exception
+    except:
+        return sys.exc_info()[2].tb_frame.f_back
+currentframe = _currentframe
+
+if hasattr(sys, '_getframe'): currentframe = lambda: sys._getframe(3)
+# done filching
+
+# _srcfile is only used in conjunction with sys._getframe().
+# To provide compatibility with older versions of Python, set _srcfile
+# to None if _getframe() is not available; this value will prevent
+# findCaller() from being called.
+#if not hasattr(sys, "_getframe"):
+#    _srcfile = None
+
+#
+#_startTime is used as the base when calculating the relative time of events
+#
+_startTime = time.time()
+
+#
+#raiseExceptions is used to see if exceptions during handling should be
+#propagated
+#
+raiseExceptions = 1
+
+#
+# If you don't want threading information in the log, set this to zero
+#
+logThreads = 1
+
+#
+# If you don't want multiprocessing information in the log, set this to zero
+#
+logMultiprocessing = 1
+
+#
+# If you don't want process information in the log, set this to zero
+#
+logProcesses = 1
+
+#---------------------------------------------------------------------------
+#   Level related stuff
+#---------------------------------------------------------------------------
+#
+# Default levels and level names, these can be replaced with any positive set
+# of values having corresponding names. There is a pseudo-level, NOTSET, which
+# is only really there as a lower limit for user-defined levels. Handlers and
+# loggers are initialized with NOTSET so that they will log all messages, even
+# at user-defined levels.
+#
+
+CRITICAL = 50
+FATAL = CRITICAL
+ERROR = 40
+WARNING = 30
+WARN = WARNING
+INFO = 20
+DEBUG = 10
+NOTSET = 0
+
+_levelNames = {
+    CRITICAL : 'CRITICAL',
+    ERROR : 'ERROR',
+    WARNING : 'WARNING',
+    INFO : 'INFO',
+    DEBUG : 'DEBUG',
+    NOTSET : 'NOTSET',
+    'CRITICAL' : CRITICAL,
+    'ERROR' : ERROR,
+    'WARN' : WARNING,
+    'WARNING' : WARNING,
+    'INFO' : INFO,
+    'DEBUG' : DEBUG,
+    'NOTSET' : NOTSET,
+}
+
+def getLevelName(level):
+    """
+    Return the textual representation of logging level 'level'.
+
+    If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
+    INFO, DEBUG) then you get the corresponding string. If you have
+    associated levels with names using addLevelName then the name you have
+    associated with 'level' is returned.
+
+    If a numeric value corresponding to one of the defined levels is passed
+    in, the corresponding string representation is returned.
+
+    Otherwise, the string "Level %s" % level is returned.
+    """
+    # _levelNames maps in both directions (int -> name and name -> int),
+    # so this also resolves a known name back to itself via the fallback.
+    return _levelNames.get(level, ("Level %s" % level))
+
+def addLevelName(level, levelName):
+    """
+    Associate 'levelName' with 'level'.
+
+    This is used when converting levels to text during message formatting.
+    """
+    # Guard the shared two-way mapping against concurrent mutation.
+    _acquireLock()
+    try:    #unlikely to cause an exception, but you never know...
+        # Register both directions: number -> name and name -> number.
+        _levelNames[level] = levelName
+        _levelNames[levelName] = level
+    finally:
+        _releaseLock()
+
+def _checkLevel(level):
+    """Coerce *level* (an int, or a registered level name) to an int.
+
+    Raises ValueError for an unknown name and TypeError for any other type.
+    """
+    if isinstance(level, int):
+        rv = level
+    elif str(level) == level:
+        # Equal to its own str() => it is a string (level name).
+        if level not in _levelNames:
+            raise ValueError("Unknown level: %r" % level)
+        rv = _levelNames[level]
+    else:
+        raise TypeError("Level not an integer or a valid string: %r" % level)
+    return rv
+
+#---------------------------------------------------------------------------
+#   Thread-related stuff
+#---------------------------------------------------------------------------
+
+#
+#_lock is used to serialize access to shared data structures in this module.
+#This needs to be an RLock because fileConfig() creates and configures
+#Handlers, and so might arbitrary user threads. Since Handler code updates the
+#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
+#the lock would already have been acquired - so we need an RLock.
+#The same argument applies to Loggers and Manager.loggerDict.
+#
+if thread:
+    _lock = threading.RLock()
+else:
+    _lock = None
+
+
+def _acquireLock():
+    """
+    Acquire the module-level lock for serializing access to shared data.
+
+    This should be released with _releaseLock().
+    """
+    # _lock is None when the thread module is unavailable; then this is
+    # a no-op.
+    if _lock:
+        _lock.acquire()
+
+def _releaseLock():
+    """
+    Release the module-level lock acquired by calling _acquireLock().
+    """
+    # No-op when threading support is absent (see _acquireLock()).
+    if _lock:
+        _lock.release()
+
+#---------------------------------------------------------------------------
+#   The logging record
+#---------------------------------------------------------------------------
+
+class LogRecord(object):
+    """
+    A LogRecord instance represents an event being logged.
+
+    LogRecord instances are created every time something is logged. They
+    contain all the information pertinent to the event being logged. The
+    main information passed in is in msg and args, which are combined
+    using str(msg) % args to create the message field of the record. The
+    record also includes information such as when the record was created,
+    the source line where the logging call was made, and any exception
+    information to be logged.
+    """
+    def __init__(self, name, level, pathname, lineno,
+                 msg, args, exc_info, func=None, sinfo=None, **kwargs):
+        """
+        Initialize a logging record with interesting information.
+        """
+        # Capture the timestamp first so it is as close as possible to
+        # the actual logging call.
+        ct = time.time()
+        self.name = name
+        self.msg = msg
+        #
+        # The following statement allows passing of a dictionary as a sole
+        # argument, so that you can do something like
+        #  logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
+        # Suggested by Stefan Behnel.
+        # Note that without the test for args[0], we get a problem because
+        # during formatting, we test to see if the arg is present using
+        # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
+        # and if the passed arg fails 'if self.args:' then no formatting
+        # is done. For example, logger.warn('Value is %d', 0) would log
+        # 'Value is %d' instead of 'Value is 0'.
+        # For the use case of passing a dictionary, this should not be a
+        # problem.
+        if args and len(args) == 1 and isinstance(args[0], dict) and args[0]:
+            args = args[0]
+        self.args = args
+        self.levelname = getLevelName(level)
+        self.levelno = level
+        self.pathname = pathname
+        try:
+            self.filename = os.path.basename(pathname)
+            self.module = os.path.splitext(self.filename)[0]
+        except (TypeError, ValueError, AttributeError):
+            # pathname may not be a usable path (e.g. the None passed in
+            # by makeLogRecord()); fall back to placeholder values.
+            self.filename = pathname
+            self.module = "Unknown module"
+        self.exc_info = exc_info
+        self.exc_text = None      # used to cache the traceback text
+        self.stack_info = sinfo
+        self.lineno = lineno
+        self.funcName = func
+        self.created = ct
+        # Fractional part of the creation time, in milliseconds.
+        self.msecs = (ct - int(ct)) * 1000
+        self.relativeCreated = (self.created - _startTime) * 1000
+        # Thread/process details are optional, controlled by the module
+        # flags logThreads / logMultiprocessing / logProcesses.
+        if logThreads and thread:
+            self.thread = thread.get_ident()
+            self.threadName = threading.current_thread().name
+        else:
+            self.thread = None
+            self.threadName = None
+        if not logMultiprocessing:
+            self.processName = None
+        else:
+            self.processName = 'MainProcess'
+            mp = sys.modules.get('multiprocessing')
+            if mp is not None:
+                # Errors may occur if multiprocessing has not finished loading
+                # yet - e.g. if a custom import hook causes third-party code
+                # to run when multiprocessing calls import. See issue 8200
+                # for an example
+                try:
+                    self.processName = mp.current_process().name
+                except Exception:
+                    pass
+        if logProcesses and hasattr(os, 'getpid'):
+            self.process = os.getpid()
+        else:
+            self.process = None
+
+    def __str__(self):
+        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
+            self.pathname, self.lineno, self.msg)
+
+    def getMessage(self):
+        """
+        Return the message for this LogRecord.
+
+        Return the message for this LogRecord after merging any user-supplied
+        arguments with the message.
+        """
+        msg = str(self.msg)
+        if self.args:
+            # %-style merge; self.args may be a tuple or a dict (see
+            # the special case in __init__).
+            msg = msg % self.args
+        return msg
+
+#
+#   Determine which class to use when instantiating log records.
+#
+_logRecordFactory = LogRecord
+
+def setLogRecordFactory(factory):
+    """
+    Set the factory to be used when instantiating a log record.
+
+    :param factory: A callable which will be called to instantiate
+    a log record.
+    """
+    global _logRecordFactory
+    _logRecordFactory = factory
+
+def getLogRecordFactory():
+    """
+    Return the factory to be used when instantiating a log record.
+    """
+    # Defaults to the LogRecord class unless setLogRecordFactory() was used.
+    return _logRecordFactory
+
+def makeLogRecord(dict):
+    """
+    Make a LogRecord whose attributes are defined by the specified dictionary.
+    This function is useful for converting a logging event received over
+    a socket connection (which is sent as a dictionary) into a LogRecord
+    instance.
+    """
+    # Build an empty record, then overwrite its attributes wholesale.
+    rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
+    rv.__dict__.update(dict)
+    return rv
+
+#---------------------------------------------------------------------------
+#   Formatter classes and functions
+#---------------------------------------------------------------------------
+
+class PercentStyle(object):
+    """Format-string handling for classic %-style format strings.
+
+    Also serves as the base class for the '{' and '$' styles below.
+    """
+
+    default_format = '%(message)s'
+    asctime_format = '%(asctime)s'
+    # Substring searched for by usesTime(); deliberately omits the
+    # trailing conversion character so '%(asctime)s' and variants match.
+    asctime_search = '%(asctime)'
+
+    def __init__(self, fmt):
+        self._fmt = fmt or self.default_format
+
+    def usesTime(self):
+        """Return True if the format string references 'asctime'."""
+        return self._fmt.find(self.asctime_search) >= 0
+
+    def format(self, record):
+        """Merge the record's attributes into the format string."""
+        return self._fmt % record.__dict__
+
+class StrFormatStyle(PercentStyle):
+    """Format-string handling for str.format() ('{') style strings."""
+    default_format = '{message}'
+    asctime_format = '{asctime}'
+    asctime_search = '{asctime'
+
+    def format(self, record):
+        # str.format() with the record's attributes as keyword arguments.
+        return self._fmt.format(**record.__dict__)
+
+
+class StringTemplateStyle(PercentStyle):
+    """Format-string handling for string.Template ('$') style strings."""
+    default_format = '${message}'
+    asctime_format = '${asctime}'
+    asctime_search = '${asctime}'
+
+    def __init__(self, fmt):
+        self._fmt = fmt or self.default_format
+        # Pre-compile the template once; substitution reuses it.
+        self._tpl = Template(self._fmt)
+
+    def usesTime(self):
+        # Match both the bare '$asctime' and braced '${asctime}' forms.
+        fmt = self._fmt
+        return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
+
+    def format(self, record):
+        return self._tpl.substitute(**record.__dict__)
+
+_STYLES = {
+    '%': PercentStyle,
+    '{': StrFormatStyle,
+    '$': StringTemplateStyle
+}
+
+class Formatter(object):
+    """
+    Formatter instances are used to convert a LogRecord to text.
+
+    Formatters need to know how a LogRecord is constructed. They are
+    responsible for converting a LogRecord to (usually) a string which can
+    be interpreted by either a human or an external system. The base Formatter
+    allows a formatting string to be specified. If none is supplied, the
+    default value of "%s(message)" is used.
+
+    The Formatter can be initialized with a format string which makes use of
+    knowledge of the LogRecord attributes - e.g. the default value mentioned
+    above makes use of the fact that the user's message and arguments are pre-
+    formatted into a LogRecord's message attribute. Currently, the useful
+    attributes in a LogRecord are described by:
+
+    %(name)s            Name of the logger (logging channel)
+    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
+                        WARNING, ERROR, CRITICAL)
+    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
+                        "WARNING", "ERROR", "CRITICAL")
+    %(pathname)s        Full pathname of the source file where the logging
+                        call was issued (if available)
+    %(filename)s        Filename portion of pathname
+    %(module)s          Module (name portion of filename)
+    %(lineno)d          Source line number where the logging call was issued
+                        (if available)
+    %(funcName)s        Function name
+    %(created)f         Time when the LogRecord was created (time.time()
+                        return value)
+    %(asctime)s         Textual time when the LogRecord was created
+    %(msecs)d           Millisecond portion of the creation time
+    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
+                        relative to the time the logging module was loaded
+                        (typically at application startup time)
+    %(thread)d          Thread ID (if available)
+    %(threadName)s      Thread name (if available)
+    %(process)d         Process ID (if available)
+    %(message)s         The result of record.getMessage(), computed just as
+                        the record is emitted
+    """
+
+    # Converts record.created (a time.time() value) to a time tuple;
+    # may be replaced (e.g. with time.gmtime) on an instance or on the
+    # class -- see formatTime() below.
+    converter = time.localtime
+
+    def __init__(self, fmt=None, datefmt=None, style='%'):
+        """
+        Initialize the formatter with specified format strings.
+
+        Initialize the formatter either with the specified format string, or a
+        default as described above. Allow for specialized date formatting with
+        the optional datefmt argument (if omitted, you get the ISO8601 format).
+
+        Use a style parameter of '%', '{' or '$' to specify that you want to
+        use one of %-formatting, :meth:`str.format` (``{}``) formatting or
+        :class:`string.Template` formatting in your format string.
+
+        .. versionchanged: 3.2
+           Added the ``style`` parameter.
+        """
+        if style not in _STYLES:
+            raise ValueError('Style must be one of: %s' % ','.join(
+                             _STYLES.keys()))
+        # The style object owns the format string; _fmt is kept in sync
+        # for code that inspects it directly.
+        self._style = _STYLES[style](fmt)
+        self._fmt = self._style._fmt
+        self.datefmt = datefmt
+
+    def formatTime(self, record, datefmt=None):
+        """
+        Return the creation time of the specified LogRecord as formatted text.
+
+        This method should be called from format() by a formatter which
+        wants to make use of a formatted time. This method can be overridden
+        in formatters to provide for any specific requirement, but the
+        basic behaviour is as follows: if datefmt (a string) is specified,
+        it is used with time.strftime() to format the creation time of the
+        record. Otherwise, the ISO8601 format is used. The resulting
+        string is returned. This function uses a user-configurable function
+        to convert the creation time to a tuple. By default, time.localtime()
+        is used; to change this for a particular formatter instance, set the
+        'converter' attribute to a function with the same signature as
+        time.localtime() or time.gmtime(). To change it for all formatters,
+        for example if you want all logging times to be shown in GMT,
+        set the 'converter' attribute in the Formatter class.
+        """
+        ct = self.converter(record.created)
+        if datefmt:
+            s = time.strftime(datefmt, ct)
+        else:
+            # ISO8601-like default, with milliseconds appended manually
+            # since strftime has no millisecond directive.
+            t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
+            s = "%s,%03d" % (t, record.msecs) # the use of % here is internal
+        return s
+
+    def formatException(self, ei):
+        """
+        Format and return the specified exception information as a string.
+
+        This default implementation just uses
+        traceback.print_exception()
+        """
+        sio = io.StringIO()
+        tb = ei[2]
+        # See issues #9427, #1553375. Commented out for now.
+        #if getattr(self, 'fullstack', False):
+        #    traceback.print_stack(tb.tb_frame.f_back, file=sio)
+        traceback.print_exception(ei[0], ei[1], tb, None, sio)
+        s = sio.getvalue()
+        sio.close()
+        # Strip the single trailing newline emitted by print_exception().
+        if s[-1:] == "\n":
+            s = s[:-1]
+        return s
+
+    def usesTime(self):
+        """
+        Check if the format uses the creation time of the record.
+        """
+        return self._style.usesTime()
+
+    def formatMessage(self, record):
+        """Merge the record into the configured format string."""
+        return self._style.format(record)
+
+    def formatStack(self, stack_info):
+        """
+        This method is provided as an extension point for specialized
+        formatting of stack information.
+
+        The input data is a string as returned from a call to
+        :func:`traceback.print_stack`, but with the last trailing newline
+        removed.
+
+        The base implementation just returns the value passed in.
+        """
+        return stack_info
+
+    def format(self, record):
+        """
+        Format the specified record as text.
+
+        The record's attribute dictionary is used as the operand to a
+        string formatting operation which yields the returned string.
+        Before formatting the dictionary, a couple of preparatory steps
+        are carried out. The message attribute of the record is computed
+        using LogRecord.getMessage(). If the formatting string uses the
+        time (as determined by a call to usesTime(), formatTime() is
+        called to format the event time. If there is exception information,
+        it is formatted using formatException() and appended to the message.
+        """
+        record.message = record.getMessage()
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+        s = self.formatMessage(record)
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+        if record.exc_text:
+            if s[-1:] != "\n":
+                s = s + "\n"
+            s = s + record.exc_text
+        if record.stack_info:
+            if s[-1:] != "\n":
+                s = s + "\n"
+            s = s + self.formatStack(record.stack_info)
+        return s
+
+#
+#   The default formatter to use when no other is specified
+#
+# Module-level singleton shared by every Handler that has no explicit
+# formatter set (see Handler.format).
+_defaultFormatter = Formatter()
+
+class BufferingFormatter(object):
+    """
+    A formatter suitable for formatting a number of records.
+    """
+    def __init__(self, linefmt=None):
+        """
+        Optionally specify a formatter which will be used to format each
+        individual record.
+        """
+        if linefmt:
+            self.linefmt = linefmt
+        else:
+            # Fall back to the module-wide default Formatter instance.
+            self.linefmt = _defaultFormatter
+
+    def formatHeader(self, records):
+        """
+        Return the header string for the specified records.
+        """
+        return ""
+
+    def formatFooter(self, records):
+        """
+        Return the footer string for the specified records.
+        """
+        return ""
+
+    def format(self, records):
+        """
+        Format the specified records and return the result as a string.
+        """
+        rv = ""
+        if len(records) > 0:
+            # header + each record (via linefmt) + footer; an empty input
+            # sequence yields an empty string with no header or footer.
+            rv = rv + self.formatHeader(records)
+            for record in records:
+                rv = rv + self.linefmt.format(record)
+            rv = rv + self.formatFooter(records)
+        return rv
+
+#---------------------------------------------------------------------------
+#   Filter classes and functions
+#---------------------------------------------------------------------------
+
+class Filter(object):
+    """
+    Filter instances are used to perform arbitrary filtering of LogRecords.
+
+    Loggers and Handlers can optionally use Filter instances to filter
+    records as desired. The base filter class only allows events which are
+    below a certain point in the logger hierarchy. For example, a filter
+    initialized with "A.B" will allow events logged by loggers "A.B",
+    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
+    initialized with the empty string, all events are passed.
+    """
+    def __init__(self, name=''):
+        """
+        Initialize a filter.
+
+        Initialize with the name of the logger which, together with its
+        children, will have its events allowed through the filter. If no
+        name is specified, allow every event.
+        """
+        self.name = name
+        # Cached length of the name; 0 means "allow everything".
+        self.nlen = len(name)
+
+    def filter(self, record):
+        """
+        Determine if the specified record is to be logged.
+
+        Is the specified record to be logged? Returns 0 for no, nonzero for
+        yes. If deemed appropriate, the record may be modified in-place.
+        """
+        if self.nlen == 0:
+            return 1
+        elif self.name == record.name:
+            return 1
+        elif record.name.find(self.name, 0, self.nlen) != 0:
+            # Not a prefix match (i.e. not record.name.startswith(self.name)).
+            return 0
+        # Prefix matched; require a "." boundary so "A.B" does not pass "A.BB".
+        return (record.name[self.nlen] == ".")
+
+class Filterer(object):
+    """
+    A base class for loggers and handlers which allows them to share
+    common code.
+    """
+    def __init__(self):
+        """
+        Initialize the list of filters to be an empty list.
+        """
+        self.filters = []
+
+    def addFilter(self, filter):
+        """
+        Add the specified filter to this handler.
+        """
+        # Ignore duplicates so a filter runs at most once per record.
+        if not (filter in self.filters):
+            self.filters.append(filter)
+
+    def removeFilter(self, filter):
+        """
+        Remove the specified filter from this handler.
+        """
+        if filter in self.filters:
+            self.filters.remove(filter)
+
+    def filter(self, record):
+        """
+        Determine if a record is loggable by consulting all the filters.
+
+        The default is to allow the record to be logged; any filter can veto
+        this and the record is then dropped. Returns a zero value if a record
+        is to be dropped, else non-zero.
+
+        .. versionchanged: 3.2
+
+           Allow filters to be just callables.
+        """
+        rv = 1
+        for f in self.filters:
+            if hasattr(f, 'filter'):
+                result = f.filter(record)
+            else:
+                result = f(record) # assume callable - will raise if not
+            if not result:
+                # First veto wins; remaining filters are not consulted.
+                rv = 0
+                break
+        return rv
+
+#---------------------------------------------------------------------------
+#   Handler classes and functions
+#---------------------------------------------------------------------------
+
+# NOTE(review): "any NAME = ..." is pre-PEP-484 mypy declaration syntax,
+# not standard Python; this source targets mypy's custom parser.
+any _handlers = weakref.WeakValueDictionary()  #map of handler names to handlers
+any _handlerList = [] # added to allow handlers to be removed in reverse of order initialized
+
+def _removeHandlerRef(wr):
+    """
+    Remove a handler reference from the internal cleanup list.
+    """
+    # wr is the weakref object registered by _addHandlerRef; it is passed
+    # in as the weakref callback argument when the handler is collected.
+    # This function can be called during module teardown, when globals are
+    # set to None. If _acquireLock is None, assume this is the case and do
+    # nothing.
+    if _acquireLock is not None:
+        _acquireLock()
+        try:
+            if wr in _handlerList:
+                _handlerList.remove(wr)
+        finally:
+            _releaseLock()
+
+def _addHandlerRef(handler):
+    """
+    Add a handler to the internal cleanup list using a weak reference.
+    """
+    _acquireLock()
+    try:
+        # The weakref callback (_removeHandlerRef) prunes the entry once
+        # the handler itself is garbage-collected.
+        _handlerList.append(weakref.ref(handler, _removeHandlerRef))
+    finally:
+        _releaseLock()
+
+class Handler(Filterer):
+    """
+    Handler instances dispatch logging events to specific destinations.
+
+    The base handler class. Acts as a placeholder which defines the Handler
+    interface. Handlers can optionally use Formatter instances to format
+    records as desired. By default, no formatter is specified; in this case,
+    the 'raw' message as determined by record.message is logged.
+    """
+    def __init__(self, level=NOTSET):
+        """
+        Initializes the instance - basically setting the formatter to None
+        and the filter list to empty.
+        """
+        Filterer.__init__(self)
+        self._name = None
+        self.level = _checkLevel(level)
+        self.formatter = None
+        # Add the handler to the global _handlerList (for cleanup on shutdown)
+        _addHandlerRef(self)
+        self.createLock()
+
+    def get_name(self):
+        return self._name
+
+    def set_name(self, name):
+        # Keep the name -> handler map (_handlers) consistent under the
+        # module lock so renames are atomic with respect to lookups.
+        _acquireLock()
+        try:
+            if self._name in _handlers:
+                del _handlers[self._name]
+            self._name = name
+            if name:
+                _handlers[name] = self
+        finally:
+            _releaseLock()
+
+    #name = property(get_name, set_name)
+
+    def createLock(self):
+        """
+        Acquire a thread lock for serializing access to the underlying I/O.
+        """
+        # 'thread' is presumably the low-level thread-support module,
+        # falsy when threading is unavailable — TODO confirm at file top.
+        if thread:
+            self.lock = threading.RLock()
+        else:
+            self.lock = None
+
+    def acquire(self):
+        """
+        Acquire the I/O thread lock.
+        """
+        # No-op when threading is unavailable (self.lock is None).
+        if self.lock:
+            self.lock.acquire()
+
+    def release(self):
+        """
+        Release the I/O thread lock.
+        """
+        if self.lock:
+            self.lock.release()
+
+    def setLevel(self, level):
+        """
+        Set the logging level of this handler.
+        """
+        self.level = _checkLevel(level)
+
+    def format(self, record):
+        """
+        Format the specified record.
+
+        If a formatter is set, use it. Otherwise, use the default formatter
+        for the module.
+        """
+        if self.formatter:
+            fmt = self.formatter
+        else:
+            fmt = _defaultFormatter
+        return fmt.format(record)
+
+    def emit(self, record):
+        """
+        Do whatever it takes to actually log the specified logging record.
+
+        This version is intended to be implemented by subclasses and so
+        raises a NotImplementedError.
+        """
+        raise NotImplementedError('emit must be implemented '
+                                  'by Handler subclasses')
+
+    def handle(self, record):
+        """
+        Conditionally emit the specified logging record.
+
+        Emission depends on filters which may have been added to the handler.
+        Wrap the actual emission of the record with acquisition/release of
+        the I/O thread lock. Returns whether the filter passed the record for
+        emission.
+        """
+        rv = self.filter(record)
+        if rv:
+            # Serialize emit() against concurrent use of the same stream.
+            self.acquire()
+            try:
+                self.emit(record)
+            finally:
+                self.release()
+        return rv
+
+    def setFormatter(self, fmt):
+        """
+        Set the formatter for this handler.
+        """
+        self.formatter = fmt
+
+    def flush(self):
+        """
+        Ensure all logging output has been flushed.
+
+        This version does nothing and is intended to be implemented by
+        subclasses.
+        """
+        pass
+
+    def close(self):
+        """
+        Tidy up any resources used by the handler.
+
+        This version removes the handler from an internal map of handlers,
+        _handlers, which is used for handler lookup by name. Subclasses
+        should ensure that this gets called from overridden close()
+        methods.
+        """
+        #get the module data lock, as we're updating a shared structure.
+        _acquireLock()
+        try:    #unlikely to raise an exception, but you never know...
+            if self._name and self._name in _handlers:
+                del _handlers[self._name]
+        finally:
+            _releaseLock()
+
+    def handleError(self, record):
+        """
+        Handle errors which occur during an emit() call.
+
+        This method should be called from handlers when an exception is
+        encountered during an emit() call. If raiseExceptions is false,
+        exceptions get silently ignored. This is what is mostly wanted
+        for a logging system - most users will not care about errors in
+        the logging system, they are more interested in application errors.
+        You could, however, replace this with a custom handler if you wish.
+        The record which was being processed is passed in to this method.
+        """
+        if raiseExceptions and sys.stderr:  # see issue 13807
+            ei = sys.exc_info()
+            try:
+                traceback.print_exception(ei[0], ei[1], ei[2],
+                                          None, sys.stderr)
+                sys.stderr.write('Logged from file %s, line %s\n' % (
+                                 record.filename, record.lineno))
+            except IOError:
+                pass    # see issue 5971
+            finally:
+                # Drop the exc_info tuple; presumably to avoid keeping the
+                # traceback frames alive via a reference cycle — TODO confirm.
+                ei = None
+
+class StreamHandler(Handler):
+    """
+    A handler class which writes logging records, appropriately formatted,
+    to a stream. Note that this class does not close the stream, as
+    sys.stdout or sys.stderr may be used.
+    """
+
+    # Appended after each record; instances/subclasses may override.
+    terminator = '\n'
+
+    def __init__(self, stream=None):
+        """
+        Initialize the handler.
+
+        If stream is not specified, sys.stderr is used.
+        """
+        Handler.__init__(self)
+        if stream is None:
+            stream = sys.stderr
+        self.stream = stream
+
+    def flush(self):
+        """
+        Flushes the stream.
+        """
+        # Guard: some stream-like objects have no flush() method.
+        if self.stream and hasattr(self.stream, "flush"):
+            self.stream.flush()
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        If a formatter is specified, it is used to format the record.
+        The record is then written to the stream with a trailing newline.  If
+        exception information is present, it is formatted using
+        traceback.print_exception and appended to the stream.  If the stream
+        has an 'encoding' attribute, it is used to determine how to do the
+        output to the stream.
+        """
+        try:
+            msg = self.format(record)
+            stream = self.stream
+            stream.write(msg)
+            stream.write(self.terminator)
+            self.flush()
+        except (KeyboardInterrupt, SystemExit):
+            # Never swallow interpreter-exit requests.
+            raise
+        except:
+            # All other errors are routed to handleError (see its docstring).
+            self.handleError(record)
+
+class FileHandler(StreamHandler):
+    """
+    A handler class which writes formatted logging records to disk files.
+    """
+    def __init__(self, filename, mode='a', encoding=None, delay=0):
+        """
+        Open the specified file and use it as the stream for logging.
+        """
+        #keep the absolute path, otherwise derived classes which use this
+        #may come a cropper when the current directory changes
+        # Without the codecs module an explicit encoding cannot be honoured.
+        if codecs is None:
+            encoding = None
+        self.baseFilename = os.path.abspath(filename)
+        self.mode = mode
+        self.encoding = encoding
+        if delay:
+            #We don't open the stream, but we still need to call the
+            #Handler constructor to set level, formatter, lock etc.
+            Handler.__init__(self)
+            self.stream = None
+        else:
+            StreamHandler.__init__(self, self._open())
+
+    def close(self):
+        """
+        Closes the stream.
+        """
+        if self.stream:
+            self.flush()
+            if hasattr(self.stream, "close"):
+                self.stream.close()
+            StreamHandler.close(self)
+            # Resetting to None means a later emit() will reopen the file.
+            self.stream = None
+
+    def _open(self):
+        """
+        Open the current base file with the (original) mode and encoding.
+        Return the resulting stream.
+        """
+        if self.encoding is None:
+            stream = open(self.baseFilename, self.mode)
+        else:
+            stream = codecs.open(self.baseFilename, self.mode, self.encoding)
+        return stream
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        If the stream was not opened because 'delay' was specified in the
+        constructor, open it before calling the superclass's emit.
+        """
+        if self.stream is None:
+            self.stream = self._open()
+        StreamHandler.emit(self, record)
+
+class _StderrHandler(StreamHandler):
+    """
+    This class is like a StreamHandler using sys.stderr, but always uses
+    whatever sys.stderr is currently set to rather than the value of
+    sys.stderr at handler construction time.
+    """
+    def __init__(self, level=NOTSET):
+        """
+        Initialize the handler.
+        """
+        # Deliberately skips StreamHandler.__init__, so self.stream is
+        # never set here; the commented-out 'stream' property below was
+        # presumably the mechanism for resolving sys.stderr lazily.
+        # NOTE(review): with the property commented out, StreamHandler.emit
+        # would read a missing 'stream' attribute — verify against upstream.
+        Handler.__init__(self, level)
+
+    #@property
+    #def stream(self):
+    #    return sys.stderr
+
+
+# "Handler of last resort": consulted by Logger.callHandlers when a record
+# reaches the root with no handler configured. Setting lastResort to None
+# restores the one-off "No handlers could be found" warning instead.
+_defaultLastResort = _StderrHandler(WARNING)
+lastResort = _defaultLastResort
+
+#---------------------------------------------------------------------------
+#   Manager classes and functions
+#---------------------------------------------------------------------------
+
+class PlaceHolder(object):
+    """
+    PlaceHolder instances are used in the Manager logger hierarchy to take
+    the place of nodes for which no loggers have been defined. This class is
+    intended for internal use only and not as part of the public API.
+    """
+    def __init__(self, alogger):
+        """
+        Initialize with the specified logger being a child of this placeholder.
+        """
+        # loggerMap is used as a set of child loggers; values are always None.
+        self.loggerMap = { alogger : None }
+
+    def append(self, alogger):
+        """
+        Add the specified logger as a child of this placeholder.
+        """
+        if alogger not in self.loggerMap:
+            self.loggerMap[alogger] = None
+
+#
+#   Determine which class to use when instantiating loggers.
+#
+# NOTE(review): "any NAME = ..." is pre-PEP-484 mypy declaration syntax,
+# not standard Python. Rebound to Logger further down in the module.
+any _loggerClass = None
+
+def setLoggerClass(klass):
+    """
+    Set the class to be used when instantiating a logger. The class should
+    define __init__() such that only a name argument is required, and the
+    __init__() should call Logger.__init__()
+    """
+    # Logger itself is always acceptable; anything else must subclass it.
+    if klass != Logger:
+        if not issubclass(klass, Logger):
+            raise TypeError("logger not derived from logging.Logger: "
+                            + klass.__name__)
+    global _loggerClass
+    _loggerClass = klass
+
+def getLoggerClass():
+    """
+    Return the class to be used when instantiating a logger.
+    """
+
+    # Module-level default; Manager.setLoggerClass can override per-manager.
+    return _loggerClass
+
+class Manager(object):
+    """
+    There is [under normal circumstances] just one Manager instance, which
+    holds the hierarchy of loggers.
+    """
+    def __init__(self, rootnode):
+        """
+        Initialize the manager with the root node of the logger hierarchy.
+        """
+        self.root = rootnode
+        # Records below or at this level are globally disabled.
+        self.disable = 0
+        self.emittedNoHandlerWarning = False
+        # Maps dotted names to Logger or PlaceHolder instances.
+        self.loggerDict = {}
+        self.loggerClass = None
+        self.logRecordFactory = None
+
+    def getLogger(self, name):
+        """
+        Get a logger with the specified name (channel name), creating it
+        if it doesn't yet exist. This name is a dot-separated hierarchical
+        name, such as "a", "a.b", "a.b.c" or similar.
+
+        If a PlaceHolder existed for the specified name [i.e. the logger
+        didn't exist but a child of it did], replace it with the created
+        logger and fix up the parent/child references which pointed to the
+        placeholder to now point to the logger.
+        """
+        rv = None
+        if not isinstance(name, str):
+            raise TypeError('A logger name must be a string')
+        # The whole lookup/create sequence is done under the module lock so
+        # concurrent callers cannot create two loggers with the same name.
+        _acquireLock()
+        try:
+            if name in self.loggerDict:
+                rv = self.loggerDict[name]
+                if isinstance(rv, PlaceHolder):
+                    # Replace the placeholder with a real logger and repair
+                    # both child and parent links.
+                    ph = rv
+                    rv = (self.loggerClass or _loggerClass)(name)
+                    rv.manager = self
+                    self.loggerDict[name] = rv
+                    self._fixupChildren(ph, rv)
+                    self._fixupParents(rv)
+            else:
+                rv = (self.loggerClass or _loggerClass)(name)
+                rv.manager = self
+                self.loggerDict[name] = rv
+                self._fixupParents(rv)
+        finally:
+            _releaseLock()
+        return rv
+
+    def setLoggerClass(self, klass):
+        """
+        Set the class to be used when instantiating a logger with this Manager.
+        """
+        if klass != Logger:
+            if not issubclass(klass, Logger):
+                raise TypeError("logger not derived from logging.Logger: "
+                                + klass.__name__)
+        self.loggerClass = klass
+
+    def setLogRecordFactory(self, factory):
+        """
+        Set the factory to be used when instantiating a log record with this
+        Manager.
+        """
+        self.logRecordFactory = factory
+
+    def _fixupParents(self, alogger):
+        """
+        Ensure that there are either loggers or placeholders all the way
+        from the specified logger to the root of the logger hierarchy.
+        """
+        name = alogger.name
+        i = name.rfind(".")
+        rv = None
+        # Walk successively shorter dotted prefixes ("a.b.c" -> "a.b" -> "a")
+        # until an existing Logger ancestor is found or prefixes run out.
+        while (i > 0) and not rv:
+            substr = name[:i]
+            if substr not in self.loggerDict:
+                self.loggerDict[substr] = PlaceHolder(alogger)
+            else:
+                obj = self.loggerDict[substr]
+                if isinstance(obj, Logger):
+                    rv = obj
+                else:
+                    assert isinstance(obj, PlaceHolder)
+                    obj.append(alogger)
+            i = name.rfind(".", 0, i - 1)
+        if not rv:
+            # No real ancestor exists; parent directly to the root logger.
+            rv = self.root
+        alogger.parent = rv
+
+    def _fixupChildren(self, ph, alogger):
+        """
+        Ensure that children of the placeholder ph are connected to the
+        specified logger.
+        """
+        name = alogger.name
+        namelen = len(name)
+        for c in ph.loggerMap.keys():
+            #The if means ... if not c.parent.name.startswith(nm)
+            if c.parent.name[:namelen] != name:
+                alogger.parent = c.parent
+                c.parent = alogger
+
+#---------------------------------------------------------------------------
+#   Logger classes and functions
+#---------------------------------------------------------------------------
+
+class Logger(Filterer):
+    """
+    Instances of the Logger class represent a single logging channel. A
+    "logging channel" indicates an area of an application. Exactly how an
+    "area" is defined is up to the application developer. Since an
+    application can have any number of areas, logging channels are identified
+    by a unique string. Application areas can be nested (e.g. an area
+    of "input processing" might include sub-areas "read CSV files", "read
+    XLS files" and "read Gnumeric files"). To cater for this natural nesting,
+    channel names are organized into a namespace hierarchy where levels are
+    separated by periods, much like the Java or Python package namespace. So
+    in the instance given above, channel names might be "input" for the upper
+    level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
+    There is no arbitrary limit to the depth of nesting.
+    """
+
+    # NOTE(review): "any NAME" is pre-PEP-484 mypy declaration syntax
+    # (attribute declarations), not standard Python.
+    any root
+    any manager
+
+    def __init__(self, name, level=NOTSET):
+        """
+        Initialize the logger with a name and an optional level.
+        """
+        Filterer.__init__(self)
+        self.name = name
+        self.level = _checkLevel(level)
+        self.parent = None
+        self.propagate = 1
+        self.handlers = []
+        self.disabled = 0
+
+    def setLevel(self, level):
+        """
+        Set the logging level of this logger.
+        """
+        self.level = _checkLevel(level)
+
+    def debug(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'DEBUG'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
+        """
+        if self.isEnabledFor(DEBUG):
+            self._log(DEBUG, msg, args, **kwargs)
+
+    def info(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'INFO'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
+        """
+        if self.isEnabledFor(INFO):
+            self._log(INFO, msg, args, **kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'WARNING'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
+        """
+        if self.isEnabledFor(WARNING):
+            self._log(WARNING, msg, args, **kwargs)
+
+    # Backward-compatible alias for warning().
+    warn = warning
+
+    def error(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'ERROR'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.error("Houston, we have a %s", "major problem", exc_info=1)
+        """
+        if self.isEnabledFor(ERROR):
+            self._log(ERROR, msg, args, **kwargs)
+
+    def exception(self, msg, *args, **kwargs):
+        """
+        Convenience method for logging an ERROR with exception information.
+        """
+        # Force exc_info so the current exception's traceback is included.
+        kwargs['exc_info'] = True
+        self.error(msg, *args, **kwargs)
+
+    def critical(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'CRITICAL'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
+        """
+        if self.isEnabledFor(CRITICAL):
+            self._log(CRITICAL, msg, args, **kwargs)
+
+    # Backward-compatible alias for critical().
+    fatal = critical
+
+    def log(self, level, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with the integer severity 'level'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
+        """
+        if not isinstance(level, int):
+            # Non-integer levels are an error or silently ignored, depending
+            # on the module-level raiseExceptions flag.
+            if raiseExceptions:
+                raise TypeError("level must be an integer")
+            else:
+                return
+        if self.isEnabledFor(level):
+            self._log(level, msg, args, **kwargs)
+
+    def findCaller(self, stack_info=False):
+        """
+        Find the stack frame of the caller so that we can note the source
+        file name, line number and function name.
+        """
+        f = currentframe()
+        #On some versions of IronPython, currentframe() returns None if
+        #IronPython isn't run with -X:Frames.
+        if f is not None:
+            f = f.f_back
+        rv = "(unknown file)", 0, "(unknown function)", None
+        while hasattr(f, "f_code"):
+            co = f.f_code
+            filename = os.path.normcase(co.co_filename)
+            # Skip frames belonging to this module (_srcfile) so the frame
+            # reported is the user code that issued the logging call.
+            if filename == _srcfile:
+                f = f.f_back
+                continue
+            sinfo = None
+            if stack_info:
+                sio = io.StringIO()
+                sio.write('Stack (most recent call last):\n')
+                traceback.print_stack(f, file=sio)
+                sinfo = sio.getvalue()
+                # Strip the single trailing newline from print_stack output.
+                if sinfo[-1] == '\n':
+                    sinfo = sinfo[:-1]
+                sio.close()
+            rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
+            break
+        return rv
+
+    def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
+                   func=None, extra=None, sinfo=None):
+        """
+        A factory method which can be overridden in subclasses to create
+        specialized LogRecords.
+        """
+        rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
+                             sinfo)
+        if extra is not None:
+            for key in extra:
+                # Refuse to clobber attributes the formatter relies on.
+                if (key in ["message", "asctime"]) or (key in rv.__dict__):
+                    raise KeyError("Attempt to overwrite %r in LogRecord" % key)
+                rv.__dict__[key] = extra[key]
+        return rv
+
+    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
+        """
+        Low-level logging routine which creates a LogRecord and then calls
+        all the handlers of this logger to handle the record.
+        """
+        sinfo = None
+        if _srcfile:
+            #IronPython doesn't track Python frames, so findCaller throws an
+            #exception on some versions of IronPython. We trap it here so that
+            #IronPython can use logging.
+            try:
+                fn, lno, func, sinfo = self.findCaller(stack_info)
+            except ValueError:
+                fn, lno, func = "(unknown file)", 0, "(unknown function)"
+        else:
+            fn, lno, func = "(unknown file)", 0, "(unknown function)"
+        if exc_info:
+            # A truthy non-tuple exc_info (e.g. exc_info=1) means "use the
+            # currently handled exception".
+            if not isinstance(exc_info, tuple):
+                exc_info = sys.exc_info()
+        record = self.makeRecord(self.name, level, fn, lno, msg, args,
+                                 exc_info, func, extra, sinfo)
+        self.handle(record)
+
+    def handle(self, record):
+        """
+        Call the handlers for the specified record.
+
+        This method is used for unpickled records received from a socket, as
+        well as those created locally. Logger-level filtering is applied.
+        """
+        if (not self.disabled) and self.filter(record):
+            self.callHandlers(record)
+
+    def addHandler(self, hdlr):
+        """
+        Add the specified handler to this logger.
+        """
+        _acquireLock()
+        try:
+            # Ignore duplicates so a handler emits at most once per record.
+            if not (hdlr in self.handlers):
+                self.handlers.append(hdlr)
+        finally:
+            _releaseLock()
+
+    def removeHandler(self, hdlr):
+        """
+        Remove the specified handler from this logger.
+        """
+        _acquireLock()
+        try:
+            if hdlr in self.handlers:
+                self.handlers.remove(hdlr)
+        finally:
+            _releaseLock()
+
+    def hasHandlers(self):
+        """
+        See if this logger has any handlers configured.
+
+        Loop through all handlers for this logger and its parents in the
+        logger hierarchy. Return True if a handler was found, else False.
+        Stop searching up the hierarchy whenever a logger with the "propagate"
+        attribute set to zero is found - that will be the last logger which
+        is checked for the existence of handlers.
+        """
+        c = self
+        rv = False
+        while c:
+            if c.handlers:
+                rv = True
+                break
+            if not c.propagate:
+                break
+            else:
+                c = c.parent
+        return rv
+
+    def callHandlers(self, record):
+        """
+        Pass a record to all relevant handlers.
+
+        Loop through all handlers for this logger and its parents in the
+        logger hierarchy. If no handler was found, output a one-off error
+        message to sys.stderr. Stop searching up the hierarchy whenever a
+        logger with the "propagate" attribute set to zero is found - that
+        will be the last logger whose handlers are called.
+        """
+        c = self
+        found = 0
+        while c:
+            for hdlr in c.handlers:
+                found = found + 1
+                # Handler-level filtering: only emit at or above its level.
+                if record.levelno >= hdlr.level:
+                    hdlr.handle(record)
+            if not c.propagate:
+                c = None    #break out
+            else:
+                c = c.parent
+        if (found == 0):
+            # No handler anywhere on the path: fall back to lastResort, or
+            # emit the one-off "no handlers" warning if it has been unset.
+            if lastResort:
+                if record.levelno >= lastResort.level:
+                    lastResort.handle(record)
+            elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
+                sys.stderr.write("No handlers could be found for logger"
+                                 " \"%s\"\n" % self.name)
+                self.manager.emittedNoHandlerWarning = True
+
+    def getEffectiveLevel(self):
+        """
+        Get the effective level for this logger.
+
+        Loop through this logger and its parents in the logger hierarchy,
+        looking for a non-zero logging level. Return the first one found.
+        """
+        logger = self
+        while logger:
+            if logger.level:
+                return logger.level
+            logger = logger.parent
+        return NOTSET
+
+    def isEnabledFor(self, level):
+        """
+        Is this logger enabled for level 'level'?
+        """
+        # Manager-wide disable threshold takes precedence over logger levels.
+        if self.manager.disable >= level:
+            return 0
+        return level >= self.getEffectiveLevel()
+
+    def getChild(self, suffix):
+        """
+        Get a logger which is a descendant to this one.
+
+        This is a convenience method, such that
+
+        logging.getLogger('abc').getChild('def.ghi')
+
+        is the same as
+
+        logging.getLogger('abc.def.ghi')
+
+        It's useful, for example, when the parent logger is named using
+        __name__ rather than a literal string.
+        """
+        # For the root logger itself the suffix is used as-is.
+        if self.root is not self:
+            suffix = '.'.join((self.name, suffix))
+        return self.manager.getLogger(suffix)
+
class RootLogger(Logger):
    """
    The logger at the top of the hierarchy.

    It behaves like any other logger except that it always carries an
    explicit level and only one instance of it exists (the module-level
    ``root``).
    """

    def __init__(self, level):
        # The root logger's name is fixed; only its level is configurable.
        super().__init__("root", level)
+
+_loggerClass = Logger
+
class LoggerAdapter(object):
    """
    An adapter for loggers which makes it easier to specify contextual
    information in logging output.
    """

    def __init__(self, logger, extra):
        """
        Initialize the adapter with a logger and a dict-like object which
        provides contextual information. This constructor signature allows
        easy stacking of LoggerAdapters, if so desired.

        You can effectively pass keyword arguments as shown in the
        following example:

        adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
        """
        self.logger = logger  # wrapped Logger (or another LoggerAdapter)
        self.extra = extra    # dict-like context merged into every call

    def process(self, msg, kwargs):
        """
        Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information. You can either
        manipulate the message itself, the keyword args or both. Return
        the message and kwargs modified (or not) to suit your needs.

        Normally, you'll only need to override this one method in a
        LoggerAdapter subclass for your specific needs.
        """
        # Note: this default implementation overwrites any 'extra'
        # supplied by the caller.
        kwargs["extra"] = self.extra
        return msg, kwargs

    #
    # Boilerplate convenience methods; each simply funnels into log()
    # with the matching severity constant.
    #
    def debug(self, msg, *args, **kwargs):
        """
        Delegate a debug call to the underlying logger.
        """
        self.log(DEBUG, msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """
        Delegate an info call to the underlying logger.
        """
        self.log(INFO, msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """
        Delegate a warning call to the underlying logger.
        """
        self.log(WARNING, msg, *args, **kwargs)

    # Alias: warn() is the same as warning().
    warn = warning

    def error(self, msg, *args, **kwargs):
        """
        Delegate an error call to the underlying logger.
        """
        self.log(ERROR, msg, *args, **kwargs)

    def exception(self, msg, *args, **kwargs):
        """
        Delegate an exception call to the underlying logger.
        """
        # A truthy exc_info asks the wrapped logger to record the
        # currently handled exception (presumably consumed in _log).
        kwargs["exc_info"] = 1
        self.log(ERROR, msg, *args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """
        Delegate a critical call to the underlying logger.
        """
        self.log(CRITICAL, msg, *args, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """
        Delegate a log call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        if self.isEnabledFor(level):
            # Merge self.extra into kwargs before delegating.
            msg, kwargs = self.process(msg, kwargs)
            self.logger._log(level, msg, args, **kwargs)

    def isEnabledFor(self, level):
        """
        Is this logger enabled for level 'level'?
        """
        if self.logger.manager.disable >= level:
            return False
        return level >= self.getEffectiveLevel()

    def setLevel(self, level):
        """
        Set the specified level on the underlying logger.
        """
        self.logger.setLevel(level)

    def getEffectiveLevel(self):
        """
        Get the effective level for the underlying logger.
        """
        return self.logger.getEffectiveLevel()

    def hasHandlers(self):
        """
        See if the underlying logger has any handlers.
        """
        return self.logger.hasHandlers()
+
root = RootLogger(WARNING)  # the single module-level root logger
Logger.root = root
Logger.manager = Manager(Logger.root)  # owns the whole logger hierarchy

#---------------------------------------------------------------------------
# Configuration classes and functions
#---------------------------------------------------------------------------

# Default format string used by basicConfig() when none is supplied.
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
+
def basicConfig(**kwargs):
    """
    Do basic configuration for the logging system.

    This function does nothing if the root logger already has handlers
    configured.  Otherwise it creates a handler (a FileHandler when
    'filename' is given, else a StreamHandler), attaches a Formatter and
    adds the handler to the root logger.

    Optional keyword arguments:

    filename  Create a FileHandler using this filename, rather than a
              StreamHandler.
    filemode  Mode used to open the file when 'filename' is given
              (defaults to 'a').
    format    Format string for the handler (defaults to BASIC_FORMAT).
    datefmt   Date/time format for the Formatter.
    style     Kind of format string: '%', '{' or '$' for %-formatting,
              :meth:`str.format` and :class:`string.Template`
              respectively (defaults to '%').
    level     If given, set the root logger level to this value.
    stream    Stream used to initialize the StreamHandler; ignored when
              'filename' is also present.

    A stream created with open(filename, mode) could be passed instead
    of filename/filemode, but note that StreamHandler never closes its
    stream (it may be sys.stdout/sys.stderr) whereas FileHandler closes
    the file when the handler is closed.

    .. versionchanged: 3.2
       Added the ``style`` parameter.
    """
    # Serialize concurrent basicConfig() calls: the check-then-add on
    # root.handlers below is not atomic.
    _acquireLock()
    try:
        if not root.handlers:
            filename = kwargs.get("filename")
            if filename:
                handler = FileHandler(filename, kwargs.get("filemode", 'a'))
            else:
                handler = StreamHandler(kwargs.get("stream"))
            formatter = Formatter(kwargs.get("format", BASIC_FORMAT),
                                  kwargs.get("datefmt", None),
                                  kwargs.get("style", '%'))
            handler.setFormatter(formatter)
            root.addHandler(handler)
            level = kwargs.get("level")
            if level is not None:
                root.setLevel(level)
    finally:
        _releaseLock()
+
+#---------------------------------------------------------------------------
+# Utility functions at module level.
+# Basically delegate everything to the root logger.
+#---------------------------------------------------------------------------
+
def getLogger(name=None):
    """
    Return a logger with the specified name, creating it if necessary.

    When no name (or an empty one) is given, the root logger is
    returned.
    """
    return Logger.manager.getLogger(name) if name else root
+
def critical(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'CRITICAL' on the root logger,
    calling basicConfig() first if the root logger has no handlers.
    """
    if not root.handlers:
        basicConfig()
    root.critical(msg, *args, **kwargs)

# Backwards-compatible alias.
fatal = critical
+
def error(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'ERROR' on the root logger,
    calling basicConfig() first if the root logger has no handlers.
    """
    if not root.handlers:
        basicConfig()
    root.error(msg, *args, **kwargs)
+
def exception(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'ERROR' and exception information on
    the root logger.  Delegates to error(), so basicConfig() is applied
    there when no handlers exist yet.
    """
    kwargs['exc_info'] = True
    error(msg, *args, **kwargs)
+
def warning(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'WARNING' on the root logger,
    calling basicConfig() first if the root logger has no handlers.
    """
    if not root.handlers:
        basicConfig()
    root.warning(msg, *args, **kwargs)

# Backwards-compatible alias.
warn = warning
+
def info(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'INFO' on the root logger,
    calling basicConfig() first if the root logger has no handlers.
    """
    if not root.handlers:
        basicConfig()
    root.info(msg, *args, **kwargs)
+
def debug(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'DEBUG' on the root logger,
    calling basicConfig() first if the root logger has no handlers.
    """
    if not root.handlers:
        basicConfig()
    root.debug(msg, *args, **kwargs)
+
def log(level, msg, *args, **kwargs):
    """
    Log 'msg % args' with the integer severity 'level' on the root
    logger, calling basicConfig() first if the root logger has no
    handlers.
    """
    if not root.handlers:
        basicConfig()
    root.log(level, msg, *args, **kwargs)
+
def disable(level):
    """
    Disable all logging calls of severity 'level' and below, across the
    whole hierarchy (the threshold lives on the root logger's manager).
    """
    root.manager.disable = level
+
def shutdown(handlerList=_handlerList):
    """
    Perform any cleanup actions in the logging system (e.g. flushing
    buffers).

    Should be called at application exit.
    """
    # Iterate a copy, in reverse order of registration.  Entries are
    # called to obtain the handler (weak references, presumably —
    # confirm with the _handlerList definition earlier in the module).
    for wr in reversed(handlerList[:]):
        #errors might occur, for example, if files are locked
        #we just ignore them if raiseExceptions is not set
        try:
            h = wr()    # may be None once the handler has been collected
            if h:
                try:
                    # NOTE(review): acquire() sits inside the try whose
                    # finally calls release(); if acquire() itself raises,
                    # release() runs on an unacquired lock.  This matches
                    # the upstream 3.2 code.
                    h.acquire()
                    h.flush()
                    h.close()
                except (IOError, ValueError):
                    # Ignore errors which might be caused
                    # because handlers have been closed but
                    # references to them are still around at
                    # application exit.
                    pass
                finally:
                    h.release()
        except:
            if raiseExceptions:
                raise
            #else, swallow
+            #else, swallow
+
# Flush and close all handlers automatically when the interpreter exits.
import atexit
atexit.register(shutdown)
+
+# Null handler
+
class NullHandler(Handler):
    """
    A handler that does nothing.

    Library code that logs events should attach an instance to its
    top-level logger: if the application using the library never
    configures logging, this prevents the one-off "No handlers could be
    found for logger XXX" warning, and records are simply discarded.
    """

    def handle(self, record):
        # Discard the record outright.
        pass

    def emit(self, record):
        # Never writes anything anywhere.
        pass

    def createLock(self):
        # No I/O ever happens, so no lock is required.
        self.lock = None
+
+# Warnings integration
+
+any _warnings_showwarning = None
+
def _showwarning(message, category, filename, lineno, file=None, line=None):
    """
    Implementation of showwarnings which redirects to logging.

    When 'file' is None the warning is formatted and logged at WARNING
    level on the "py.warnings" logger; otherwise the call is handed to
    the saved original warnings implementation (when capture is active).
    """
    if file is None:
        formatted = warnings.formatwarning(message, category, filename,
                                           lineno, line)
        wlogger = getLogger("py.warnings")
        if not wlogger.handlers:
            # Avoid the "no handlers" warning for the warnings logger itself.
            wlogger.addHandler(NullHandler())
        wlogger.warning("%s", formatted)
    elif _warnings_showwarning is not None:
        _warnings_showwarning(message, category, filename, lineno, file, line)
+
def captureWarnings(capture):
    """
    Toggle redirection of warnings into the logging package.

    capture=True installs _showwarning as warnings.showwarning, saving
    the previous hook; capture=False restores the saved hook, if any.
    Both directions are idempotent.
    """
    global _warnings_showwarning
    if capture and _warnings_showwarning is None:
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
    elif not capture and _warnings_showwarning is not None:
        warnings.showwarning = _warnings_showwarning
        _warnings_showwarning = None
diff --git a/typeshed/stdlib/3.3/xml/__init__.pyi b/test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
similarity index 100%
rename from typeshed/stdlib/3.3/xml/__init__.pyi
rename to test-data/stdlib-samples/3.2/incomplete/urllib/__init__.py
diff --git a/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py b/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
new file mode 100644
index 0000000..4fa65c4
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/incomplete/urllib/parse.py
@@ -0,0 +1,980 @@
+"""Parse (absolute and relative) URLs.
+
+urlparse module is based upon the following RFC specifications.
+
+RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
+and L.  Masinter, January 2005.
+
+RFC 2732 : "Format for Literal IPv6 Addresses in URLs" by R.Hinden, B.Carpenter
+and L.Masinter, December 1999.
+
+RFC 2396:  "Uniform Resource Identifiers (URI)": Generic Syntax by T.
+Berners-Lee, R. Fielding, and L. Masinter, August 1998.
+
+RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998.
+
+RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
+1995.
+
+RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
+McCahill, December 1994
+
+RFC 3986 is considered the current standard and any future changes to
+the urlparse module should conform with it.  The urlparse module is
+currently not entirely compliant with this RFC due to de facto
+scenarios for parsing, and for backward compatibility purposes, some
+parsing quirks from older RFCs are retained. The test cases in
+test_urlparse.py provide a good indicator of parsing behavior.
+"""
+
+import sys
+import collections
+
+__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
+           "urlsplit", "urlunsplit", "urlencode", "parse_qs",
+           "parse_qsl", "quote", "quote_plus", "quote_from_bytes",
+           "unquote", "unquote_plus", "unquote_to_bytes"]
+
# A classification of schemes ('' means apply by default).  These tables
# drive the per-scheme behaviour of urlsplit/urljoin below.
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
                 'wais', 'file', 'https', 'shttp', 'mms',
                 'prospero', 'rtsp', 'rtspu', '', 'sftp',
                 'svn', 'svn+ssh']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
               'imap', 'wais', 'file', 'mms', 'https', 'shttp',
               'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
               'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
               'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
               'mms', '', 'sftp']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
              'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
                 'nntp', 'wais', 'https', 'shttp', 'snews',
                 'file', 'prospero', '']

# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                '0123456789'
                '+-.')

# XXX: Consider replacing with functools.lru_cache
MAX_CACHE_SIZE = 20
# Maps (url, scheme, allow_fragments, type(url), type(scheme)) -> SplitResult.
_parse_cache = {}
+
def clear_cache():
    """Clear the parse cache and the quoters cache."""
    _parse_cache.clear()
    # _safe_quoters is the quote() helper cache defined later in this module.
    _safe_quoters.clear()
+
+
# Helpers for bytes handling
# For 3.2, we deliberately require applications that
# handle improperly quoted URLs to do their own
# decoding and encoding. If valid use cases are
# presented, we may relax this by using latin-1
# decoding internally for 3.3
# Defaults used by _encode_result/_decode_args when transparently
# converting bytes arguments to str and back.
_implicit_encoding = 'ascii'
_implicit_errors = 'strict'
+
+def _noop(obj):
+    return obj
+
def _encode_result(obj, encoding=_implicit_encoding,
                        errors=_implicit_errors):
    # Coerce a str result back to bytes (ASCII/strict by default).
    return obj.encode(encoding, errors)
+
def _decode_args(args, encoding=_implicit_encoding,
                       errors=_implicit_errors):
    # Decode each bytes argument to str; empty/None values become ''.
    return tuple(x.decode(encoding, errors) if x else '' for x in args)
+
def _coerce_args(*args):
    """
    Normalize mixed str/bytes arguments for the parsing functions.

    Returns the (possibly decoded) arguments followed by a coercion
    function that maps results back to the caller's input type: _noop
    when the first argument was str, _encode_result otherwise.  Mixing
    str and non-str arguments raises TypeError, except that empty
    values are exempt (to support the scheme='' defaults).
    """
    str_input = isinstance(args[0], str)
    for other in args[1:]:
        # Empty values are allowed to disagree with the first argument.
        if other and isinstance(other, str) != str_input:
            raise TypeError("Cannot mix str and non-str arguments")
    if str_input:
        return args + (_noop,)
    return _decode_args(args) + (_encode_result,)
+
+# Result objects are more helpful than simple tuples
+class _ResultMixinStr(object):
+    """Standard approach to encoding parsed results from str to bytes"""
+    __slots__ = ()
+
+    def encode(self, encoding='ascii', errors='strict'):
+        return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self))
+
+
+class _ResultMixinBytes(object):
+    """Standard approach to decoding parsed results from bytes to str"""
+    __slots__ = ()
+
+    def decode(self, encoding='ascii', errors='strict'):
+        return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self))
+
+
+class _NetlocResultMixinBase(object):
+    """Shared methods for the parsed result objects containing a netloc element"""
+    __slots__ = ()
+
+    @property
+    def username(self):
+        return self._userinfo[0]
+
+    @property
+    def password(self):
+        return self._userinfo[1]
+
+    @property
+    def hostname(self):
+        hostname = self._hostinfo[0]
+        if not hostname:
+            hostname = None
+        elif hostname is not None:
+            hostname = hostname.lower()
+        return hostname
+
+    @property
+    def port(self):
+        port = self._hostinfo[1]
+        if port is not None:
+            port = int(port, 10)
+        return port
+
+
class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
    __slots__ = ()

    @property
    def _userinfo(self):
        # Everything before the last '@' is 'user[:password]'.
        userinfo, have_info, _ = self.netloc.rpartition('@')
        if not have_info:
            return None, None
        username, have_password, password = userinfo.partition(':')
        if not have_password:
            password = None
        return username, password

    @property
    def _hostinfo(self):
        _, _, hostinfo = self.netloc.rpartition('@')
        _, have_open_br, bracketed = hostinfo.partition('[')
        if have_open_br:
            # IPv6 literal: the host sits inside [...]; any port follows ']'.
            hostname, _, tail = bracketed.partition(']')
            _, have_port, port = tail.partition(':')
        else:
            hostname, have_port, port = hostinfo.partition(':')
        return hostname, port if have_port else None
+
+
class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes):
    __slots__ = ()

    @property
    def _userinfo(self):
        # Bytes twin of _NetlocResultMixinStr._userinfo.
        userinfo, have_info, _ = self.netloc.rpartition(b'@')
        if not have_info:
            return None, None
        username, have_password, password = userinfo.partition(b':')
        if not have_password:
            password = None
        return username, password

    @property
    def _hostinfo(self):
        _, _, hostinfo = self.netloc.rpartition(b'@')
        _, have_open_br, bracketed = hostinfo.partition(b'[')
        if have_open_br:
            # IPv6 literal: the host sits inside [...]; any port follows ']'.
            hostname, _, tail = bracketed.partition(b']')
            _, have_port, port = tail.partition(b':')
        else:
            hostname, have_port, port = hostinfo.partition(b':')
        return hostname, port if have_port else None
+
+
from collections import namedtuple

# Bare tuple layouts for the result types; the mixin classes above add
# the str/bytes conversion behaviour and netloc accessors.
_DefragResultBase = namedtuple('DefragResult', 'url fragment')
_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment')
_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment')

# For backwards compatibility, alias _NetlocResultMixinStr
# ResultBase is no longer part of the documented API, but it is
# retained since deprecating it isn't worth the hassle
ResultBase = _NetlocResultMixinStr
+
+# Structured result objects for string data
class DefragResult(_DefragResultBase, _ResultMixinStr):
    """(url, fragment) result of urldefrag() for str input."""
    __slots__ = ()
    def geturl(self):
        # Reattach the fragment only when one was present.
        if self.fragment:
            return self.url + '#' + self.fragment
        else:
            return self.url
+
class SplitResult(_SplitResultBase, _NetlocResultMixinStr):
    """5-tuple result of urlsplit() for str input."""
    __slots__ = ()
    def geturl(self):
        # Reassemble an equivalent URL from the components.
        return urlunsplit(self)
+
class ParseResult(_ParseResultBase, _NetlocResultMixinStr):
    """6-tuple result of urlparse() for str input."""
    __slots__ = ()
    def geturl(self):
        # Reassemble an equivalent URL from the components.
        return urlunparse(self)
+
+# Structured result objects for bytes data
class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
    """(url, fragment) result of urldefrag() for bytes input."""
    __slots__ = ()
    def geturl(self):
        # Reattach the fragment only when one was present.
        if self.fragment:
            return self.url + b'#' + self.fragment
        else:
            return self.url
+
class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes):
    """5-tuple result of urlsplit() for bytes input."""
    __slots__ = ()
    def geturl(self):
        # Reassemble an equivalent URL from the components.
        return urlunsplit(self)
+
class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes):
    """6-tuple result of urlparse() for bytes input."""
    __slots__ = ()
    def geturl(self):
        # Reassemble an equivalent URL from the components.
        return urlunparse(self)
+
# Set up the encode/decode result pairs
def _fix_result_transcoding():
    # Cross-link each str result class with its bytes twin so that
    # _ResultMixinStr.encode()/_ResultMixinBytes.decode() can convert.
    _result_pairs = (
        (DefragResult, DefragResultBytes),
        (SplitResult, SplitResultBytes),
        (ParseResult, ParseResultBytes),
    )
    for _decoded, _encoded in _result_pairs:
        _decoded._encoded_counterpart = _encoded
        _encoded._decoded_counterpart = _decoded

# Run once at import time, then drop the helper from the namespace.
_fix_result_transcoding()
del _fix_result_transcoding
+
def urlparse(url, scheme='', allow_fragments=True):
    """Parse a URL into 6 components:
    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    url, scheme, _coerce_result = _coerce_args(url, scheme)
    # Renamed local from 'tuple', which shadowed the builtin of the
    # same name.
    splitresult = urlsplit(url, scheme, allow_fragments)
    scheme, netloc, url, query, fragment = splitresult
    if scheme in uses_params and ';' in url:
        # Only schemes listed in uses_params carry ';params' segments.
        url, params = _splitparams(url)
    else:
        params = ''
    result = ParseResult(scheme, netloc, url, params, query, fragment)
    return _coerce_result(result)
+
+def _splitparams(url):
+    if '/'  in url:
+        i = url.find(';', url.rfind('/'))
+        if i < 0:
+            return url, ''
+    else:
+        i = url.find(';')
+    return url[:i], url[i+1:]
+
+def _splitnetloc(url, start=0):
+    delim = len(url)   # position of end of domain part of url, default is end
+    for c in '/?#':    # look for delimiters; the order is NOT important
+        wdelim = url.find(c, start)        # find first of this delim
+        if wdelim >= 0:                    # if found
+            delim = min(delim, wdelim)     # use earliest delim position
+    return url[start:delim], url[delim:]   # return (domain, rest)
+
def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL into 5 components:
    <scheme>://<netloc>/<path>?<query>#<fragment>
    Return a 5-tuple: (scheme, netloc, path, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    url, scheme, _coerce_result = _coerce_args(url, scheme)
    allow_fragments = bool(allow_fragments)
    # The input types are part of the cache key so str and bytes parse
    # results are never mixed up.
    key = url, scheme, allow_fragments, type(url), type(scheme)
    cached = _parse_cache.get(key, None)
    if cached:
        return _coerce_result(cached)
    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
        clear_cache()
    netloc = query = fragment = ''
    i = url.find(':')
    if i > 0:
        if url[:i] == 'http': # optimize the common case
            scheme = url[:i].lower()
            url = url[i+1:]
            if url[:2] == '//':
                netloc, url = _splitnetloc(url, 2)
                if (('[' in netloc and ']' not in netloc) or
                        (']' in netloc and '[' not in netloc)):
                    raise ValueError("Invalid IPv6 URL")
            if allow_fragments and '#' in url:
                url, fragment = url.split('#', 1)
            if '?' in url:
                url, query = url.split('?', 1)
            v = SplitResult(scheme, netloc, url, query, fragment)
            _parse_cache[key] = v
            return _coerce_result(v)
        # General case: accept the prefix as a scheme only if every
        # character before ':' is a valid scheme character.
        for c in url[:i]:
            if c not in scheme_chars:
                break
        else:
            try:
                # make sure "url" is not actually a port number (in which case
                # "scheme" is really part of the path
                _testportnum = int(url[i+1:])
            except ValueError:
                scheme, url = url[:i].lower(), url[i+1:]

    if url[:2] == '//':
        netloc, url = _splitnetloc(url, 2)
        if (('[' in netloc and ']' not in netloc) or
                (']' in netloc and '[' not in netloc)):
            raise ValueError("Invalid IPv6 URL")
    # Unlike the 'http' fast path above, fragment/query splitting here
    # is gated on the per-scheme tables.
    if allow_fragments and scheme in uses_fragment and '#' in url:
        url, fragment = url.split('#', 1)
    if scheme in uses_query and '?' in url:
        url, query = url.split('?', 1)
    v = SplitResult(scheme, netloc, url, query, fragment)
    _parse_cache[key] = v
    return _coerce_result(v)
+
def urlunparse(components):
    """Put a parsed URL back together again.  This may result in a
    slightly different, but equivalent URL, if the URL that was parsed
    originally had redundant delimiters, e.g. a ? with an empty query
    (the draft states that these are equivalent)."""
    scheme, netloc, url, params, query, fragment, _coerce_result = (
        _coerce_args(*components))
    if params:
        # Params are re-attached to the final path segment with ';'.
        url = url + ';' + params
    return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
+
def urlunsplit(components):
    """Combine the elements of a tuple as returned by urlsplit() into a
    complete URL as a string. The data argument can be any five-item iterable.
    This may result in a slightly different, but equivalent URL, if the URL that
    was parsed originally had unnecessary delimiters (for example, a ? with an
    empty query; the RFC states that these are equivalent)."""
    scheme, netloc, url, query, fragment, _coerce_result = (
        _coerce_args(*components))
    if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
        # A netloc (even an empty one for netloc-using schemes) requires
        # the '//' marker and an absolute path.
        if url and url[:1] != '/':
            url = '/' + url
        url = '//' + (netloc or '') + url
    if scheme:
        url = scheme + ':' + url
    if query:
        url = url + '?' + query
    if fragment:
        url = url + '#' + fragment
    return _coerce_result(url)
+
def urljoin(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter."""
    if not base:
        return url
    if not url:
        return base
    base, url, _coerce_result = _coerce_args(base, url)
    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
            urlparse(base, '', allow_fragments)
    # Parse the (possibly relative) url with the base's scheme as default.
    scheme, netloc, path, params, query, fragment = \
            urlparse(url, bscheme, allow_fragments)
    if scheme != bscheme or scheme not in uses_relative:
        # Different scheme, or one that doesn't allow relative refs:
        # the second URL stands on its own.
        return _coerce_result(url)
    if scheme in uses_netloc:
        if netloc:
            return _coerce_result(urlunparse((scheme, netloc, path,
                                              params, query, fragment)))
        netloc = bnetloc
    if path[:1] == '/':
        # Absolute path: only the netloc is inherited from the base.
        return _coerce_result(urlunparse((scheme, netloc, path,
                                          params, query, fragment)))
    if not path and not params:
        # Empty relative reference: keep the base's path/params (and
        # query, unless the reference supplies its own).
        path = bpath
        params = bparams
        if not query:
            query = bquery
        return _coerce_result(urlunparse((scheme, netloc, path,
                                          params, query, fragment)))
    # Merge the base path (minus its last segment) with the relative
    # path, then resolve '.' and '..' segments in place.
    segments = bpath.split('/')[:-1] + path.split('/')
    # XXX The stuff below is bogus in various ways...
    if segments[-1] == '.':
        segments[-1] = ''
    while '.' in segments:
        segments.remove('.')
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            if (segments[i] == '..'
                and segments[i-1] not in ('', '..')):
                # Collapse a 'segment/..' pair and restart the scan.
                del segments[i-1:i+1]
                break
            i = i+1
        else:
            break
    if segments == ['', '..']:
        segments[-1] = ''
    elif len(segments) >= 2 and segments[-1] == '..':
        segments[-2:] = ['']
    return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments),
                                      params, query, fragment)))
+
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If
    the URL contained no fragments, the second element is the
    empty string.
    """
    url, _coerce_result = _coerce_args(url)
    if '#' not in url:
        # Nothing to strip; return the URL unchanged.
        return _coerce_result(DefragResult(url, ''))
    s, n, p, a, q, frag = urlparse(url)
    defrag = urlunparse((s, n, p, a, q, ''))
    return _coerce_result(DefragResult(defrag, frag))
+
def unquote_to_bytes(string):
    """unquote_to_bytes('abc%20def') -> b'abc def'."""
    # Note: strings are encoded as UTF-8. This is only an issue if it contains
    # unescaped non-ASCII characters, which URIs should not.
    if not string:
        # Duck-type check kept from upstream: raise AttributeError for
        # falsy values that are not string-like.
        string.split
        return b''
    if isinstance(string, str):
        string = string.encode('utf-8')
    chunks = string.split(b'%')
    if len(chunks) == 1:
        # No '%' at all: nothing to decode.
        return string
    decoded = [chunks[0]]
    for chunk in chunks[1:]:
        try:
            # The two characters after '%' are a hex byte value.
            decoded.append(bytes([int(chunk[:2], 16)]) + chunk[2:])
        except ValueError:
            # Malformed escape: keep the '%' literally.
            decoded.append(b'%' + chunk)
    return b''.join(decoded)
+
def unquote(string, encoding='utf-8', errors='replace'):
    """Replace %xx escapes by their single-character equivalent. The optional
    encoding and errors parameters specify how to decode percent-encoded
    sequences into Unicode characters, as accepted by the bytes.decode()
    method.
    By default, percent-encoded sequences are decoded with UTF-8, and invalid
    sequences are replaced by a placeholder character.

    unquote('abc%20def') -> 'abc def'.
    """
    if string == '':
        return string
    pieces = string.split('%')
    if len(pieces) == 1:
        # No escapes at all: return the input unchanged.
        return string
    if encoding is None:
        encoding = 'utf-8'
    if errors is None:
        errors = 'replace'
    result = pieces[0]
    # pending: contiguous run of percent-decoded bytes, not yet decoded.
    # Decoding is delayed so multi-byte sequences (e.g. UTF-8) that span
    # several %xx escapes are decoded as one unit.
    pending = b''
    for piece in pieces[1:]:
        try:
            if not piece:
                raise ValueError
            pending += bytes.fromhex(piece[:2])
            trailing = piece[2:]
            if not trailing:
                # Escape with no trailing text: keep accumulating.
                continue
        except ValueError:
            # Malformed escape: keep the '%' literally.
            trailing = '%' + piece
        # Plain text encountered: flush the accumulated byte run.
        result += pending.decode(encoding, errors) + trailing
        pending = b''
    if pending:
        # Flush any bytes left at the end of the string.
        result += pending.decode(encoding, errors)
    return result
+
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
             encoding='utf-8', errors='replace'):
    """Parse a query given as a string argument.

        Arguments:

        qs: percent-encoded query string to be parsed

        keep_blank_values: flag indicating whether blank values in
            percent-encoded queries should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings.  The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.

        encoding and errors: specify how to decode percent-encoded sequences
            into Unicode characters, as accepted by the bytes.decode() method.

        Returns a dict mapping each field name to a list of its values.
    """
    # Renamed from 'dict' to avoid shadowing the builtin.
    parsed_result = {}
    pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
                      encoding=encoding, errors=errors)
    for name, value in pairs:
        # setdefault avoids the membership-test-then-index double lookup.
        parsed_result.setdefault(name, []).append(value)
    return parsed_result
+
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
              encoding='utf-8', errors='replace'):
    """Parse a query given as a string argument.

    Arguments:

    qs: percent-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.  A
        true value indicates that blanks should be retained as blank
        strings.  The default false value indicates that blank values
        are to be ignored and treated as if they were not included.

    strict_parsing: flag indicating what to do with parsing errors. If
        false (the default), errors are silently ignored. If true,
        errors raise a ValueError exception.

    encoding and errors: specify how to decode percent-encoded sequences
        into Unicode characters, as accepted by the bytes.decode() method.

    Returns a list of (name, value) pairs.
    """
    qs, _coerce_result = _coerce_args(qs)
    # Both '&' and ';' separate fields.
    fields = [piece for chunk in qs.split('&') for piece in chunk.split(';')]
    result = []
    for field in fields:
        if not field and not strict_parsing:
            continue
        name, eq, value = field.partition('=')
        if not eq:
            # No '=' at all: an error under strict parsing, otherwise a
            # bare control-name kept only when blanks are retained.
            if strict_parsing:
                raise ValueError("bad query field: %r" % (field,))
            if not keep_blank_values:
                continue
        if value or keep_blank_values:
            name = _coerce_result(unquote(name.replace('+', ' '),
                                          encoding=encoding, errors=errors))
            value = _coerce_result(unquote(value.replace('+', ' '),
                                           encoding=encoding, errors=errors))
            result.append((name, value))
    return result
+
def unquote_plus(string, encoding='utf-8', errors='replace'):
    """Like unquote(), but also replace plus signs by spaces, as required
    for unquoting HTML form values.

    unquote_plus('%7e/abc+def') -> '~/abc def'
    """
    return unquote(string.replace('+', ' '), encoding, errors)
+
# Byte values that never need percent-quoting: ASCII alphanumerics
# plus '_', '.' and '-'.
_ALWAYS_SAFE = frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                         b'abcdefghijklmnopqrstuvwxyz'
                         b'0123456789'
                         b'_.-')
_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE)
# Cache of Quoter lookup functions, keyed by the normalized 'safe' bytes.
_safe_quoters = {}
+
class Quoter(collections.defaultdict):
    """Map byte values (in range(0, 256)) to strings.

    Bytes in the safe set (the caller-supplied set merged with the
    always-safe characters) map to themselves; every other byte maps to
    its percent-encoded form '%XX'.
    """
    # Backed by defaultdict so lookups of already-cached keys happen at
    # C speed; __missing__ computes and memoizes each new entry.
    def __init__(self, safe):
        """safe: bytes object of extra characters to leave unquoted."""
        self.safe = _ALWAYS_SAFE.union(safe)

    def __repr__(self):
        # Show the cached mapping rather than the defaultdict repr.
        return "<Quoter %r>" % dict(self)

    def __missing__(self, b):
        # Cache miss: compute the quoted form, store it, return it.
        quoted = chr(b) if b in self.safe else '%{:02X}'.format(b)
        self[b] = quoted
        return quoted
+
def quote(string, safe='/', encoding=None, errors=None):
    """quote('abc def') -> 'abc%20def'

    Each part of a URL (path, query, ...) has its own set of reserved
    characters that must be quoted.  RFC 2396 lists them as:

    reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
                  "$" | ","

    Each of these characters is reserved in some component of a URL,
    but not necessarily in all of them.

    By default quote() is meant for the path section of a URL, so '/'
    is not encoded: in typical usage the existing slashes are intended
    as separators.

    string and safe may be either str or bytes objects. encoding and
    errors must not be specified if string is bytes.

    The optional encoding and errors parameters say how to handle
    non-ASCII characters, as accepted by str.encode(); they default to
    'utf-8' and 'strict' respectively.
    """
    if not isinstance(string, str):
        # bytes input is quoted as-is; the text-only parameters are
        # rejected explicitly rather than silently ignored.
        if encoding is not None:
            raise TypeError("quote() doesn't support 'encoding' for bytes")
        if errors is not None:
            raise TypeError("quote() doesn't support 'errors' for bytes")
        return quote_from_bytes(string, safe)
    if not string:
        return string
    if encoding is None:
        encoding = 'utf-8'
    if errors is None:
        errors = 'strict'
    return quote_from_bytes(string.encode(encoding, errors), safe)
+
def quote_plus(string, safe='', encoding=None, errors=None):
    """Like quote(), but also replace ' ' with '+', as required for quoting
    HTML form values. Plus signs in the original string are escaped unless
    they are included in safe. It also does not have safe default to '/'.
    """
    # Fast path: with no spaces present, plain quote() is already right
    # (string may be either str or bytes).
    has_space = (' ' in string) if isinstance(string, str) else (b' ' in string)
    if not has_space:
        return quote(string, safe, encoding, errors)
    # Temporarily mark space as safe so it survives quoting, then swap
    # every space for '+'.
    space = ' ' if isinstance(safe, str) else b' '
    quoted = quote(string, safe + space, encoding, errors)
    return quoted.replace(' ', '+')
+
def quote_from_bytes(bs, safe='/'):
    """Like quote(), but accepts a bytes object rather than a str, and does
    not perform string-to-bytes encoding.  It always returns an ASCII string.
    quote_from_bytes(b'abc def\xab') -> 'abc%20def%AB'
    """
    if not isinstance(bs, (bytes, bytearray)):
        raise TypeError("quote_from_bytes() expected bytes")
    if not bs:
        return ''
    # Normalize 'safe' to ASCII-only bytes.
    if isinstance(safe, str):
        safe = safe.encode('ascii', 'ignore')
    else:
        safe = bytes([c for c in safe if c < 128])
    # Shortcut: if every byte is already safe there is nothing to quote.
    if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
        return bs.decode()
    # One cached Quoter per distinct safe set.
    quoter = _safe_quoters.get(safe)
    if quoter is None:
        quoter = Quoter(safe).__getitem__
        _safe_quoters[safe] = quoter
    return ''.join(map(quoter, bs))
+
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
    """Encode a sequence of two-element tuples or dictionary into a URL query string.

    If any values in the query arg are sequences and doseq is true, each
    sequence element is converted to a separate parameter.

    If the query arg is a sequence of two-element tuples, the order of the
    parameters in the output will match the order of parameters in the
    input.

    The query arg may be either a string or a bytes type. When query arg is a
    string, the safe, encoding and error parameters are sent the quote_plus for
    encoding.
    """

    if hasattr(query, "items"):
        # Mapping: iterate its (key, value) pairs.
        query = query.items()
    else:
        # It's a bother at times that strings and string-like objects are
        # sequences.
        try:
            # non-sequence items should not work with len()
            # non-empty strings will fail this
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # Zero-length sequences of all types will get here and succeed,
            # but that's a minor nit.  Since the original implementation
            # allowed empty dicts that type of behavior probably should be
            # preserved for consistency
        except TypeError:
            # Re-raise with a clearer message but the original traceback.
            ty, va, tb = sys.exc_info()
            raise TypeError("not a valid non-string sequence "
                            "or mapping object").with_traceback(tb)

    l = []  # accumulates 'key=value' strings, '&'-joined at the end
    if not doseq:
        # Simple mode: every value is quoted as one scalar.
        for k, v in query:
            # bytes are quoted as-is; everything else via str().
            if isinstance(k, bytes):
                k = quote_plus(k, safe)
            else:
                k = quote_plus(str(k), safe, encoding, errors)

            if isinstance(v, bytes):
                v = quote_plus(v, safe)
            else:
                v = quote_plus(str(v), safe, encoding, errors)
            l.append(k + '=' + v)
    else:
        # doseq mode: sequence values expand into one parameter per element.
        for k, v in query:
            if isinstance(k, bytes):
                k = quote_plus(k, safe)
            else:
                k = quote_plus(str(k), safe, encoding, errors)

            if isinstance(v, bytes):
                v = quote_plus(v, safe)
                l.append(k + '=' + v)
            elif isinstance(v, str):
                v = quote_plus(v, safe, encoding, errors)
                l.append(k + '=' + v)
            else:
                try:
                    # Is this a sufficient test for sequence-ness?
                    x = len(v)
                except TypeError:
                    # not a sequence
                    v = quote_plus(str(v), safe, encoding, errors)
                    l.append(k + '=' + v)
                else:
                    # loop over the sequence
                    for elt in v:
                        if isinstance(elt, bytes):
                            elt = quote_plus(elt, safe)
                        else:
                            elt = quote_plus(str(elt), safe, encoding, errors)
                        l.append(k + '=' + elt)
    return '&'.join(l)
+
+# Utilities to parse URLs (most of these return None for missing parts):
+# unwrap('<URL:type://host/path>') --> 'type://host/path'
+# splittype('type:opaquestring') --> 'type', 'opaquestring'
+# splithost('//host[:port]/path') --> 'host[:port]', '/path'
+# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
+# splitpasswd('user:passwd') -> 'user', 'passwd'
+# splitport('host:port') --> 'host', 'port'
+# splitquery('/path?query') --> '/path', 'query'
+# splittag('/path#tag') --> '/path', 'tag'
+# splitattr('/path;attr1=value1;attr2=value2;...') ->
+#   '/path', ['attr1=value1', 'attr2=value2', ...]
+# splitvalue('attr=value') --> 'attr', 'value'
+# urllib.parse.unquote('abc%20def') -> 'abc def'
# quote('abc def') -> 'abc%20def'
+
def to_bytes(url):
    """to_bytes(u"URL") --> 'URL'."""
    # Most URL schemes require ASCII. If that changes, the conversion
    # can be relaxed.
    # XXX get rid of to_bytes()
    if not isinstance(url, str):
        return url
    try:
        return url.encode("ASCII").decode()
    except UnicodeError:
        raise UnicodeError("URL " + repr(url) +
                           " contains non-ASCII characters")
+
def unwrap(url):
    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
    url = str(url).strip()
    # Peel an optional '<...>' wrapper, then an optional 'URL:' prefix.
    if url.startswith('<') and url.endswith('>'):
        url = url[1:-1].strip()
    if url.startswith('URL:'):
        url = url[4:].strip()
    return url
+
_typeprog = None
def splittype(url):
    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
    global _typeprog
    if _typeprog is None:
        # Compile lazily on first use and cache at module level.
        import re
        _typeprog = re.compile('^([^/:]+):')

    m = _typeprog.match(url)
    if not m:
        return None, url
    scheme = m.group(1)
    # Scheme names are case-insensitive, so normalize to lowercase.
    return scheme.lower(), url[len(scheme) + 1:]
+
_hostprog = None
def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        # Compile lazily on first use and cache at module level.
        import re
        _hostprog = re.compile('^//([^/?]*)(.*)$')

    m = _hostprog.match(url)
    if not m:
        return None, url
    host_port, path = m.group(1, 2)
    # Ensure a non-empty path always starts with '/'.
    if path and not path.startswith('/'):
        path = '/' + path
    return host_port, path
+
_userprog = None
def splituser(host):
    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
    global _userprog
    if _userprog is None:
        # Compile lazily; the greedy first group splits on the LAST '@'.
        import re
        _userprog = re.compile('^(.*)@(.*)$')

    m = _userprog.match(host)
    return m.group(1, 2) if m else (None, host)
+
_passwdprog = None
def splitpasswd(user):
    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
    global _passwdprog
    if _passwdprog is None:
        # re.S lets the password part span newlines.
        import re
        _passwdprog = re.compile('^([^:]*):(.*)$', re.S)

    m = _passwdprog.match(user)
    return m.group(1, 2) if m else (user, None)
+
+# splittag('/path#tag') --> '/path', 'tag'
_portprog = None
def splitport(host):
    """splitport('host:port') --> 'host', 'port'."""
    global _portprog
    if _portprog is None:
        # Only an all-digit suffix after ':' counts as a port.
        import re
        _portprog = re.compile('^(.*):([0-9]+)$')

    m = _portprog.match(host)
    return m.group(1, 2) if m else (host, None)
+
_nportprog = None
def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.
    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number is found after ':'.
    Return None if ':' but not a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')

    m = _nportprog.match(host)
    if not m:
        # No ':' at all: hand back the caller's default.
        return host, defport
    name, port = m.group(1, 2)
    if port:
        try:
            nport = int(port)
        except ValueError:
            nport = None
    else:
        # Trailing ':' with no digits.
        nport = None
    return name, nport
+
_queryprog = None
def splitquery(url):
    """splitquery('/path?query') --> '/path', 'query'."""
    global _queryprog
    if _queryprog is None:
        import re
        # Raw string: '\?' in a plain literal is an invalid escape
        # sequence (a DeprecationWarning since Python 3.6).
        _queryprog = re.compile(r'^(.*)\?([^?]*)$')

    match = _queryprog.match(url)
    if match: return match.group(1, 2)
    return url, None
+
_tagprog = None
def splittag(url):
    """splittag('/path#tag') --> '/path', 'tag'."""
    global _tagprog
    if _tagprog is None:
        # Greedy first group splits on the LAST '#'.
        import re
        _tagprog = re.compile('^(.*)#([^#]*)$')

    m = _tagprog.match(url)
    return m.group(1, 2) if m else (url, None)
+
def splitattr(url):
    """splitattr('/path;attr1=value1;attr2=value2;...') ->
        '/path', ['attr1=value1', 'attr2=value2', ...]."""
    path, *attributes = url.split(';')
    return path, attributes
+
_valueprog = None
def splitvalue(attr):
    """splitvalue('attr=value') --> 'attr', 'value'."""
    global _valueprog
    if _valueprog is None:
        # Splits on the FIRST '=', the value keeps any later '='.
        import re
        _valueprog = re.compile('^([^=]*)=(.*)$')

    m = _valueprog.match(attr)
    return m.group(1, 2) if m else (attr, None)
diff --git a/test-data/stdlib-samples/3.2/posixpath.py b/test-data/stdlib-samples/3.2/posixpath.py
new file mode 100644
index 0000000..cf5d59e
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/posixpath.py
@@ -0,0 +1,466 @@
+"""Common operations on Posix pathnames.
+
+Instead of importing this module directly, import os and refer to
+this module as os.path.  The "os.path" name is an alias for this
+module on Posix systems; on other systems (e.g. Mac, Windows),
+os.path provides the same operations in a manner specific to that
+platform, and is an alias to another module (e.g. macpath, ntpath).
+
+Some of this can actually be useful on non-Posix systems too, e.g.
+for manipulation of the pathname component of URLs.
+"""
+
+import os
+import sys
+import stat
+import genericpath
+from genericpath import *
+
+from typing import (
+    Tuple, BinaryIO, TextIO, Pattern, AnyStr, List, Set, Any, Union, cast
+)
+
# Public API of this module (os.path on POSIX systems).
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
           "basename","dirname","commonprefix","getsize","getmtime",
           "getatime","getctime","islink","exists","lexists","isdir","isfile",
           "ismount", "expanduser","expandvars","normpath","abspath",
           "samefile","sameopenfile","samestat",
           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
           "devnull","realpath","supports_unicode_filenames","relpath"]

# Strings representing various path-related bits and pieces.
# These are primarily for export; internally, they are hardcoded.
curdir = '.'
pardir = '..'
extsep = '.'
sep = '/'
pathsep = ':'
defpath = ':/bin:/usr/bin'
# POSIX has no alternative path separator.
altsep = None # type: str
devnull = '/dev/null'
+
+def _get_sep(path: AnyStr) -> AnyStr:
+    if isinstance(path, bytes):
+        return b'/'
+    else:
+        return '/'
+
+# Normalize the case of a pathname.  Trivial in Posix, string.lower on Mac.
+# On MS-DOS this may also turn slashes into backslashes; however, other
+# normalizations (such as optimizing '../' away) are not allowed
+# (another function should be defined to do that).
+
def normcase(s: AnyStr) -> AnyStr:
    """Normalize case of pathname.  Has no effect under Posix"""
    # TODO: on Mac OS X, this should really return s.lower().
    if isinstance(s, (bytes, str)):
        return cast(AnyStr, s)
    raise TypeError("normcase() argument must be str or bytes, "
                    "not '{}'".format(s.__class__.__name__))
+
+
+# Return whether a path is absolute.
+# Trivial in Posix, harder on the Mac or MS-DOS.
+
def isabs(s: AnyStr) -> bool:
    """Test whether a path is absolute"""
    # A POSIX path is absolute iff it begins with the separator.
    if isinstance(s, bytes):
        return s.startswith(b'/')
    return s.startswith('/')
+
+
+# Join pathnames.
+# Ignore the previous parts if a part is absolute.
+# Insert a '/' unless the first part is empty or already ends in '/'.
+
def join(a: AnyStr, *p: AnyStr) -> AnyStr:
    """Join two or more pathname components, inserting '/' as needed.
    If any component is an absolute path, all previous path components
    will be discarded."""
    sep = b'/' if isinstance(a, bytes) else '/'
    result = a
    for part in p:
        if part.startswith(sep):
            # Absolute component: restart from it.
            result = part
        elif not result or result.endswith(sep):
            result += part
        else:
            result = result + sep + part
    return result
+
+
+# Split a path in head (everything up to the last '/') and tail (the
+# rest).  If the path ends in '/', tail will be empty.  If there is no
+# '/' in the path, head  will be empty.
+# Trailing '/'es are stripped from head unless it is the root.
+
def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    """Split a pathname.  Returns tuple "(head, tail)" where "tail" is
    everything after the final slash.  Either part may be empty."""
    sep = b'/' if isinstance(p, bytes) else '/'
    cut = p.rfind(sep) + 1
    head = p[:cut]
    tail = p[cut:]
    # Strip trailing slashes from head unless it is all slashes (root).
    if head and head != sep * len(head):
        head = head.rstrip(sep)
    return head, tail
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    # Dispatch on str/bytes and delegate to the shared generic helper
    # (POSIX has no alternative separator, hence the None).
    if isinstance(p, bytes):
        return genericpath._splitext(p, b'/', None, b'.')
    return genericpath._splitext(p, '/', None, '.')
splitext.__doc__ = genericpath._splitext.__doc__
+
+# Split a pathname into a drive specification and the rest of the
+# path.  Useful on DOS/Windows/NT; on Unix, the drive is always empty.
+
def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]:
    """Split a pathname into drive and path. On Posix, drive is always
    empty."""
    # p[:0] yields an empty str or bytes matching p's type.
    empty = p[:0]
    return empty, p
+
+
+# Return the tail (basename) part of a path, same as split(path)[1].
+
def basename(p: AnyStr) -> AnyStr:
    """Returns the final component of a pathname"""
    sep = b'/' if isinstance(p, bytes) else '/'
    return p[p.rfind(sep) + 1:]
+
+
+# Return the head (dirname) part of a path, same as split(path)[0].
+
def dirname(p: AnyStr) -> AnyStr:
    """Returns the directory component of a pathname"""
    sep = b'/' if isinstance(p, bytes) else '/'
    head = p[:p.rfind(sep) + 1]
    # Strip trailing slashes unless head is entirely slashes (the root).
    if head and head != sep * len(head):
        head = head.rstrip(sep)
    return head
+
+
+# Is a path a symbolic link?
+# This will always return false on systems where os.lstat doesn't exist.
+
def islink(path: AnyStr) -> bool:
    """Test whether a path is a symbolic link"""
    try:
        mode = os.lstat(path).st_mode
    except (os.error, AttributeError):
        # Missing path, or platforms without os.lstat: not a link.
        return False
    return stat.S_ISLNK(mode)
+
+# Being true for dangling symbolic links is also useful.
+
def lexists(path: AnyStr) -> bool:
    """Test whether a path exists.  Returns True for broken symbolic links"""
    # lstat does not follow symlinks, so a dangling link still "exists".
    try:
        os.lstat(path)
        return True
    except os.error:
        return False
+
+
+# Are two filenames really pointing to the same file?
+
def samefile(f1: AnyStr, f2: AnyStr) -> bool:
    """Test whether two pathnames reference the same actual file"""
    st1 = os.stat(f1)
    st2 = os.stat(f2)
    # Same inode on the same device => same file.
    return st1.st_ino == st2.st_ino and st1.st_dev == st2.st_dev
+
+
+# Are two open files really referencing the same file?
+# (Not necessarily the same file descriptor!)
+
def sameopenfile(fp1: int, fp2: int) -> bool:
    """Test whether two open file objects reference the same file"""
    # Compare by (inode, device), not by file descriptor.
    st1 = os.fstat(fp1)
    st2 = os.fstat(fp2)
    return st1.st_ino == st2.st_ino and st1.st_dev == st2.st_dev
+
+
+# Are two stat buffers (obtained from stat, fstat or lstat)
+# describing the same file?
+
def samestat(s1: os.stat_result, s2: os.stat_result) -> bool:
    """Test whether two stat buffers reference the same file"""
    # A file is identified by its (inode, device) pair.
    return (s1.st_ino, s1.st_dev) == (s2.st_ino, s2.st_dev)
+
+
+# Is a path a mount point?
+# (Does this work for all UNIXes?  Is it even guaranteed to work by Posix?)
+
def ismount(path: AnyStr) -> bool:
    """Test whether a path is a mount point"""
    if islink(path):
        # A symlink can never be a mount point.
        return False
    try:
        st_self = os.lstat(path)
        if isinstance(path, bytes):
            parent = join(path, b'..')
        else:
            parent = join(path, '..')
        st_parent = os.lstat(parent)
    except os.error:
        # Nonexistent paths are not mount points.
        return False
    if st_self.st_dev != st_parent.st_dev:
        # path/.. lives on a different device than path.
        return True
    if st_self.st_ino == st_parent.st_ino:
        # path/.. is the same i-node as path (e.g. '/').
        return True
    return False
+
+
+# Expand paths beginning with '~' or '~user'.
+# '~' means $HOME; '~user' means that user's home directory.
+# If the path doesn't begin with '~', or if the user or $HOME is unknown,
+# the path is returned unchanged (leaving error reporting to whatever
+# function is called with the expanded path as argument).
+# See also module 'glob' for expansion of *, ? and [...] in pathnames.
+# (A function should also be defined to do full *sh-style environment
+# variable expansion.)
+
def expanduser(path: AnyStr) -> AnyStr:
    """Expand ~ and ~user constructions.  If user or $HOME is unknown,
    do nothing."""
    if isinstance(path, bytes):
        tilde = b'~'
    else:
        tilde = '~'
    if not path.startswith(tilde):
        return path
    sep = _get_sep(path)
    # i marks the end of the '~user' prefix (or the end of the string).
    i = path.find(sep, 1)
    if i < 0:
        i = len(path)
    if i == 1:
        # Bare '~': prefer $HOME, fall back to the password database.
        userhome = None  # type: Union[str, bytes]
        if 'HOME' not in os.environ:
            import pwd
            userhome = pwd.getpwuid(os.getuid()).pw_dir
        else:
            userhome = os.environ['HOME']
    else:
        # '~user': look the named user up in the password database.
        import pwd
        name = path[1:i]  # type: Union[str, bytes]
        if isinstance(name, bytes):
            name = str(name, 'ASCII')
        try:
            pwent = pwd.getpwnam(name)
        except KeyError:
            # Unknown user: leave the path unchanged.
            return path
        userhome = pwent.pw_dir
    if isinstance(path, bytes):
        userhome = os.fsencode(userhome)
        root = b'/'
    else:
        root = '/'
    # Avoid a doubled slash when the home directory ends with '/'.
    userhome = userhome.rstrip(root)
    return (userhome + path[i:]) or root
+
+
+# Expand paths containing shell variable substitutions.
+# This expands the forms $variable and ${variable} only.
+# Non-existent variables are left unchanged.
+
# Lazily compiled patterns matching $var and ${var}, one per string type.
_varprog = None # type: Pattern[str]
_varprogb = None # type: Pattern[bytes]

def expandvars(path: AnyStr) -> AnyStr:
    """Expand shell variables of form $var and ${var}.  Unknown variables
    are left unchanged."""
    global _varprog, _varprogb
    if isinstance(path, bytes):
        if b'$' not in path:
            # Fast path: nothing to expand.
            return path
        if not _varprogb:
            import re
            _varprogb = re.compile(br'\$(\w+|\{[^}]*\})', re.ASCII)
        search = _varprogb.search
        start = b'{'
        end = b'}'
    else:
        if '$' not in path:
            # Fast path: nothing to expand.
            return path
        if not _varprog:
            import re
            _varprog = re.compile(r'\$(\w+|\{[^}]*\})', re.ASCII)
        search = _varprog.search
        start = '{'
        end = '}'
    # i is the position where the next search resumes.
    i = 0
    while True:
        m = search(path, i)
        if not m:
            break
        i, j = m.span(0)
        name = None  # type: Union[str, bytes]
        name = m.group(1)
        # Strip the braces of the ${var} form.
        if name.startswith(start) and name.endswith(end):
            name = name[1:-1]
        if isinstance(name, bytes):
            name = str(name, 'ASCII')
        if name in os.environ:
            tail = path[j:]
            value = None  # type: Union[str, bytes]
            value = os.environ[name]
            if isinstance(path, bytes):
                value = value.encode('ASCII')
            # Splice in the value; resume scanning after it so the
            # substituted text itself is not re-expanded.
            path = path[:i] + value
            i = len(path)
            path += tail
        else:
            # Unknown variable: leave it and move past the match.
            i = j
    return path
+
+
+# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
+# It should be understood that this may change the meaning of the path
+# if it contains symbolic links!
+
def normpath(path: AnyStr) -> AnyStr:
    """Normalize path, eliminating double slashes, etc."""
    if isinstance(path, bytes):
        sep, empty, dot, dotdot = b'/', b'', b'.', b'..'
    else:
        sep, empty, dot, dotdot = '/', '', '.', '..'
    if path == empty:
        return dot
    # POSIX allows one or two initial slashes, but treats three or more
    # as a single slash.
    leading = 0  # type: int
    if path.startswith(sep):
        leading = 1
        if path.startswith(sep * 2) and not path.startswith(sep * 3):
            leading = 2
    stack = []  # type: List[AnyStr]
    for part in path.split(sep):
        if part == empty or part == dot:
            # Empty components ('//') and '.' vanish.
            continue
        if part != dotdot:
            stack.append(part)
        elif not leading and not stack:
            # Relative path climbing above its start: keep the '..'.
            stack.append(part)
        elif stack and stack[-1] == dotdot:
            # Can't cancel a retained '..'; keep accumulating.
            stack.append(part)
        elif stack:
            # '..' cancels the previous real component.
            stack.pop()
        # else: '..' at an absolute root is dropped ('/..' -> '/').
    path = sep.join(stack)
    if leading:
        path = sep * leading + path
    return path or dot
+
+
def abspath(path: AnyStr) -> AnyStr:
    """Return an absolute path."""
    if isabs(path):
        return normpath(path)
    # Anchor relative paths at the current working directory, matching
    # the path's string type.
    if isinstance(path, bytes):
        cwd = os.getcwdb()
    else:
        cwd = os.getcwd()
    return normpath(join(cwd, path))
+
+
+# Return a canonical path (i.e. the absolute location of a file on the
+# filesystem).
+
def realpath(filename: AnyStr) -> AnyStr:
    """Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path."""
    if isinstance(filename, bytes):
        sep = b'/'
        empty = b''
    else:
        sep = '/'
        empty = ''
    # bits[0] anchors the path: the root for absolute paths, the empty
    # string for relative ones.
    if isabs(filename):
        bits = [sep] + filename.split(sep)[1:]
    else:
        bits = [empty] + filename.split(sep)

    # Walk every prefix of the path, resolving the first symlink found
    # and recursing on the rewritten path.
    for i in range(2, len(bits)+1):
        component = join(*bits[0:i])
        # Resolve symbolic links.
        if islink(component):
            resolved = _resolve_link(component)
            if resolved is None:
                # Infinite loop -- return original component + rest of the path
                return abspath(join(*([component] + bits[i:])))
            else:
                newpath = join(*([resolved] + bits[i:]))
                return realpath(newpath)

    # No symlink in any prefix: just absolutize.
    return abspath(filename)
+
+
def _resolve_link(path: AnyStr) -> Union[AnyStr, None]:
    """Internal helper function.  Takes a path and follows symlinks
    until we either arrive at something that isn't a symlink, or
    encounter a path we've seen before (meaning that there's a loop).

    Returns None when a symlink loop is detected.
    """
    # Return type widened to Union[AnyStr, None]: the loop case really
    # does return None, and realpath() checks for it.
    paths_seen = set()  # type: Set[AnyStr]
    while islink(path):
        if path in paths_seen:
            # Already seen this path, so we must have a symlink loop
            return None
        paths_seen.add(path)
        # Resolve where the link points to
        resolved = os.readlink(path)
        if not isabs(resolved):
            # Relative targets resolve against the link's own directory.
            dir = dirname(path)
            path = normpath(join(dir, resolved))
        else:
            path = normpath(resolved)
    return path
+
# True only on Mac OS X in this implementation; other POSIX platforms
# report False here.
supports_unicode_filenames = (sys.platform == 'darwin')
+
def relpath(path: AnyStr, start: Union[AnyStr, None] = None) -> AnyStr:
    """Return a relative version of a path.

    *start* defaults to the current directory.  Raises ValueError if
    *path* is empty.
    """
    # Annotation fixed: the default really is None, so the parameter is
    # Union[AnyStr, None], not plain AnyStr.
    if not path:
        raise ValueError("no path specified")

    if isinstance(path, bytes):
        curdir = b'.'
        sep = b'/'
        pardir = b'..'
    else:
        curdir = '.'
        sep = '/'
        pardir = '..'

    if start is None:
        start = curdir

    # Compare the absolute forms component by component.
    start_list = [x for x in abspath(start).split(sep) if x]
    path_list = [x for x in abspath(path).split(sep) if x]

    # Work out how much of the filepath is shared by start and path.
    i = len(commonprefix([start_list, path_list]))

    # Climb out of the unshared part of start, then descend into path.
    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
diff --git a/test-data/stdlib-samples/3.2/pprint.py b/test-data/stdlib-samples/3.2/pprint.py
new file mode 100644
index 0000000..650c1a3
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/pprint.py
@@ -0,0 +1,380 @@
+#  Author:      Fred L. Drake, Jr.
+#               fdrake at acm.org
+#
+#  This is a simple little module I wrote to make life easier.  I didn't
+#  see anything quite like it in the library, though I may have overlooked
+#  something.  I wrote this when I was trying to read some heavily nested
+#  tuples with fairly non-descriptive content.  This is modeled very much
+#  after Lisp/Scheme - style pretty-printing of lists.  If you find it
+#  useful, thank small children who sleep at night.
+
+"""Support to pretty-print lists, tuples, & dictionaries recursively.
+
+Very simple, but useful, especially in debugging data structures.
+
+Classes
+-------
+
+PrettyPrinter()
+    Handle pretty-printing operations onto a stream using a configured
+    set of formatting parameters.
+
+Functions
+---------
+
+pformat()
+    Format a Python object into a pretty-printed representation.
+
+pprint()
+    Pretty-print a Python object to a stream [default is sys.stdout].
+
+saferepr()
+    Generate a 'standard' repr()-like value, but protect against recursive
+    data structures.
+
+"""
+
+import sys as _sys
+from collections import OrderedDict as _OrderedDict
+from io import StringIO as _StringIO
+
+from typing import Any, Tuple, Dict, TextIO, cast, List
+
+__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
+           "PrettyPrinter"]
+
# cache these for faster access: binding builtins and bound methods to
# module-level names makes lookups in the hot formatting loops cheaper.
_commajoin = ", ".join
_id = id
_len = len
_type = type
+
+
def pprint(object: object, stream: TextIO = None, indent: int = 1,
           width: int = 80, depth: int = None) -> None:
    """Pretty-print a Python object to a stream [default is sys.stdout]."""
    # Delegate to a one-shot PrettyPrinter configured with the same options.
    PrettyPrinter(stream=stream, indent=indent, width=width,
                  depth=depth).pprint(object)
+
def pformat(object: object, indent: int = 1, width: int = 80,
            depth: int = None) -> str:
    """Format a Python object into a pretty-printed representation."""
    printer = PrettyPrinter(indent=indent, width=width, depth=depth)
    return printer.pformat(object)
+
def saferepr(object: object) -> str:
    """Version of repr() which can handle recursive data structures."""
    # _safe_repr returns (repr_string, readable, recursive); keep the string.
    repr_string, _, _ = _safe_repr(object, {}, None, 0)
    return repr_string
+
def isreadable(object: object) -> bool:
    """Determine if saferepr(object) is readable by eval()."""
    _, readable, _ = _safe_repr(object, {}, None, 0)
    return readable
+
def isrecursive(object: object) -> bool:
    """Determine if object requires a recursive representation."""
    _, _, recursive = _safe_repr(object, {}, None, 0)
    return recursive
+
+class _safe_key:
+    """Helper function for key functions when sorting unorderable objects.
+
+    The wrapped-object will fallback to an Py2.x style comparison for
+    unorderable types (sorting first comparing the type name and then by
+    the obj ids).  Does not work recursively, so dict.items() must have
+    _safe_key applied to both the key and the value.
+
+    """
+
+    __slots__ = ['obj']
+
+    def __init__(self, obj: Any) -> None:
+        self.obj = obj
+
+    def __lt__(self, other: Any) -> Any:
+        rv = self.obj.__lt__(other.obj) # type: Any
+        if rv is NotImplemented:
+            rv = (str(type(self.obj)), id(self.obj)) < \
+                 (str(type(other.obj)), id(other.obj))
+        return rv
+
def _safe_tuple(t: Tuple[Any, Any]) -> Tuple[_safe_key, _safe_key]:
    "Helper function for comparing 2-tuples"
    key, value = t
    return _safe_key(key), _safe_key(value)
+
class PrettyPrinter:
    """Pretty-printer engine.

    Formats nested containers (dict, list, tuple, set, frozenset) onto a
    stream, splitting them across lines when a one-line repr would exceed
    the configured width.
    """

    def __init__(self, indent: int = 1, width: int = 80, depth: int = None,
                 stream: TextIO = None) -> None:
        """Handle pretty printing operations onto a stream using a set of
        configured parameters.

        indent
            Number of spaces to indent for each level of nesting.

        width
            Attempted maximum number of columns in the output.

        depth
            The maximum depth to print out nested structures.

        stream
            The desired output stream.  If omitted (or false), the standard
            output stream available at construction will be used.

        """
        indent = int(indent)
        width = int(width)
        assert indent >= 0, "indent must be >= 0"
        assert depth is None or depth > 0, "depth must be > 0"
        assert width, "width must be != 0"
        self._depth = depth
        self._indent_per_level = indent
        self._width = width
        if stream is not None:
            self._stream = stream
        else:
            self._stream = _sys.stdout

    def pprint(self, object: object) -> None:
        # Write the formatted object plus a trailing newline.
        self._format(object, self._stream, 0, 0, {}, 0)
        self._stream.write("\n")

    def pformat(self, object: object) -> str:
        # Like pprint() but collect the output into a string.
        sio = _StringIO()
        self._format(object, sio, 0, 0, {}, 0)
        return sio.getvalue()

    def isrecursive(self, object: object) -> int:
        # Third element of the format() triple is the "recursive" flag.
        return self.format(object, {}, 0, 0)[2]

    def isreadable(self, object: object) -> int:
        # "Readable" means eval()-able and free of recursive references.
        s, readable, recursive = self.format(object, {}, 0, 0)
        return readable and not recursive

    def _format(self, object: object, stream: TextIO, indent: int,
                allowance: int, context: Dict[int, int], level: int) -> None:
        # context maps id(obj) -> 1 for every container currently being
        # formatted on this call stack; seeing an id again means a cycle.
        level = level + 1
        objid = _id(object)
        if objid in context:
            stream.write(_recursion(object))
            # NOTE(review): _recursive/_readable are only ever assigned here
            # and in _repr(), never initialized in __init__ — confirm callers
            # always go through pprint()/pformat() before reading them.
            self._recursive = True
            self._readable = False
            return
        rep = self._repr(object, context, level - 1)
        typ = _type(object)
        # Split across lines only when the one-line repr does not fit in the
        # remaining width (allowance reserves room for trailing punctuation).
        sepLines = _len(rep) > (self._width - 1 - indent - allowance)
        write = stream.write

        if self._depth and level > self._depth:
            # Past the depth limit: rep is already the elided "..." form.
            write(rep)
            return

        if sepLines:
            r = getattr(typ, "__repr__", None)
            if isinstance(object, dict):
                write('{')
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                length = _len(object)
                if length:
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    if issubclass(typ, _OrderedDict):
                        # OrderedDict preserves insertion order; don't sort.
                        items = list(object.items())
                    else:
                        items = sorted(object.items(), key=_safe_tuple)
                    key, ent = items[0]
                    rep = self._repr(key, context, level)
                    write(rep)
                    write(': ')
                    self._format(ent, stream, indent + _len(rep) + 2,
                                  allowance + 1, context, level)
                    if length > 1:
                        for key, ent in items[1:]:
                            rep = self._repr(key, context, level)
                            write(',\n%s%s: ' % (' '*indent, rep))
                            self._format(ent, stream, indent + _len(rep) + 2,
                                          allowance + 1, context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                write('}')
                return

            # Only use the container layout when __repr__ was not overridden;
            # a custom repr must be shown verbatim.
            if ((issubclass(typ, list) and r is list.__repr__) or
                (issubclass(typ, tuple) and r is tuple.__repr__) or
                (issubclass(typ, set) and r is set.__repr__) or
                (issubclass(typ, frozenset) and r is frozenset.__repr__)
               ):
                anyobj = cast(Any, object) # TODO Collection?
                length = _len(anyobj)
                if issubclass(typ, list):
                    write('[')
                    endchar = ']'
                    lst = anyobj
                elif issubclass(typ, set):
                    if not length:
                        write('set()')
                        return
                    write('{')
                    endchar = '}'
                    # Sets are unordered; sort for deterministic output.
                    lst = sorted(anyobj, key=_safe_key)
                elif issubclass(typ, frozenset):
                    if not length:
                        write('frozenset()')
                        return
                    write('frozenset({')
                    endchar = '})'
                    lst = sorted(anyobj, key=_safe_key)
                    # len('frozenset(') == 10: indent members past the prefix.
                    indent += 10
                else:
                    write('(')
                    endchar = ')'
                    lst = list(anyobj)
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                if length:
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    self._format(lst[0], stream, indent, allowance + 1,
                                 context, level)
                    if length > 1:
                        for ent in lst[1:]:
                            write(',\n' + ' '*indent)
                            self._format(ent, stream, indent,
                                          allowance + 1, context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                if issubclass(typ, tuple) and length == 1:
                    # One-element tuples require the trailing comma.
                    write(',')
                write(endchar)
                return

        write(rep)

    def _repr(self, object: object, context: Dict[int, int],
              level: int) -> str:
        # Wrapper around format() that folds the readable/recursive flags
        # into the printer's state; context is copied so siblings don't
        # see each other's in-progress ids.
        repr, readable, recursive = self.format(object, context.copy(),
                                                self._depth, level)
        if not readable:
            self._readable = False
        if recursive:
            self._recursive = True
        return repr

    def format(self, object: object, context: Dict[int, int],
               maxlevels: int, level: int) -> Tuple[str, int, int]:
        """Format object for a specific context, returning a string
        and flags indicating whether the representation is 'readable'
        and whether the object represents a recursive construct.
        """
        return _safe_repr(object, context, maxlevels, level)
+
+
+# Return triple (repr_string, isreadable, isrecursive).
+
def _safe_repr(object: object, context: Dict[int, int],
               maxlevels: int, level: int) -> Tuple[str, bool, bool]:
    # Returns (repr_string, isreadable, isrecursive).  Containers are
    # rendered without calling their own __repr__, with cycle detection
    # via the context dict (id -> 1 for containers on this call stack).
    typ = _type(object)
    if typ is str:
        s = cast(str, object)
        if 'locale' not in _sys.modules:
            # No locale loaded: plain repr() is safe and cheaper.
            return repr(object), True, False
        # Pick the quote style that needs the least escaping.
        if "'" in s and '"' not in s:
            closure = '"'
            quotes = {'"': '\\"'}
        else:
            closure = "'"
            quotes = {"'": "\\'"}
        qget = quotes.get
        sio = _StringIO()
        write = sio.write
        for char in s:
            if char.isalpha():
                write(char)
            else:
                # Escape via repr() of the single char, stripping its quotes.
                write(qget(char, repr(char)[1:-1]))
        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False

    r = getattr(typ, "__repr__", None)
    if issubclass(typ, dict) and r is dict.__repr__:
        if not object:
            return "{}", True, False
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return "{...}", False, objid in context
        if objid in context:
            # Already formatting this dict higher up the stack: a cycle.
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []  # type: List[str]
        append = components.append
        level += 1
        saferepr = _safe_repr
        items = sorted((cast(dict, object)).items(), key=_safe_tuple)
        for k, v in items:
            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
            append("%s: %s" % (krepr, vrepr))
            readable = readable and kreadable and vreadable
            if krecur or vrecur:
                recursive = True
        del context[objid]
        return "{%s}" % _commajoin(components), readable, recursive

    if (issubclass(typ, list) and r is list.__repr__) or \
       (issubclass(typ, tuple) and r is tuple.__repr__):
        anyobj = cast(Any, object) # TODO Sequence?
        if issubclass(typ, list):
            if not object:
                return "[]", True, False
            format = "[%s]"
        elif _len(anyobj) == 1:
            # One-element tuples need the trailing comma to round-trip.
            format = "(%s,)"
        else:
            if not object:
                return "()", True, False
            format = "(%s)"
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return format % "...", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        for o in anyobj:
            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
            append(orepr)
            if not oreadable:
                readable = False
            if orecur:
                recursive = True
        del context[objid]
        return format % _commajoin(components), readable, recursive

    # Fallback: trust repr(); '<...>' style reprs are not eval()-able.
    rep = repr(object)
    return rep, bool(rep and not rep.startswith('<')), False
+
+
+def _recursion(object: object) -> str:
+    return ("<Recursion on %s with id=%s>"
+            % (_type(object).__name__, _id(object)))
+
+
def _perfcheck(object: object = None) -> None:
    """Crude benchmark: time _safe_repr() vs. PrettyPrinter.pformat()."""
    import time
    if object is None:
        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
    printer = PrettyPrinter()
    t_start = time.time()
    _safe_repr(object, {}, None, 0)
    t_safe = time.time()
    printer.pformat(object)
    t_pretty = time.time()
    print("_safe_repr:", t_safe - t_start)
    print("pformat:", t_pretty - t_safe)

if __name__ == "__main__":
    _perfcheck()
diff --git a/test-data/stdlib-samples/3.2/random.py b/test-data/stdlib-samples/3.2/random.py
new file mode 100644
index 0000000..8ce0a69
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/random.py
@@ -0,0 +1,743 @@
+"""Random variable generators.
+
+    integers
+    --------
+           uniform within range
+
+    sequences
+    ---------
+           pick random element
+           pick random sample
+           generate random permutation
+
+    distributions on the real line:
+    ------------------------------
+           uniform
+           triangular
+           normal (Gaussian)
+           lognormal
+           negative exponential
+           gamma
+           beta
+           pareto
+           Weibull
+
+    distributions on the circle (angles 0 to 2pi)
+    ---------------------------------------------
+           circular uniform
+           von Mises
+
+General notes on the underlying Mersenne Twister core generator:
+
+* The period is 2**19937-1.
+* It is one of the most extensively tested generators in existence.
+* The random() method is implemented in C, executes in a single Python step,
+  and is, therefore, threadsafe.
+
+"""
+
+from warnings import warn as _warn
+from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
+from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
+from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
+from os import urandom as _urandom
+from collections import Set as _Set, Sequence as _Sequence
+from hashlib import sha512 as _sha512
+
+from typing import (
+    Any, TypeVar, Iterable, Sequence, List, Callable, Set, cast, SupportsInt, Union
+)
+
+__all__ = ["Random","seed","random","uniform","randint","choice","sample",
+           "randrange","shuffle","normalvariate","lognormvariate",
+           "expovariate","vonmisesvariate","gammavariate","triangular",
+           "gauss","betavariate","paretovariate","weibullvariate",
+           "getstate","setstate", "getrandbits",
+           "SystemRandom"]
+
# Precomputed constants used by the distribution methods below.
NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)  # Kinderman-Monahan constant (normalvariate)
TWOPI = 2.0*_pi
LOG4 = _log(4.0)
SG_MAGICCONST = 1.0 + _log(4.5)
BPF = 53        # Number of bits in a float
RECIP_BPF = 2**-BPF # type: float
+
+
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley.  Adapted by Raymond Hettinger for use with
+# the Mersenne Twister  and os.urandom() core generators.
+
+import _random
+
+T = TypeVar('T')
+
+class Random(_random.Random):
+    """Random number generator base class used by bound module functions.
+
+    Used to instantiate instances of Random to get generators that don't
+    share state.
+
+    Class Random can also be subclassed if you want to use a different basic
+    generator of your own devising: in that case, override the following
+    methods:  random(), seed(), getstate(), and setstate().
+    Optionally, implement a getrandbits() method so that randrange()
+    can cover arbitrarily large ranges.
+
+    """
+
+    VERSION = 3     # used by getstate/setstate
+    gauss_next = 0.0
+
+    def __init__(self, x: object = None) -> None:
+        """Initialize an instance.
+
+        Optional argument x controls seeding, as for Random.seed().
+        """
+
+        self.seed(x)
+        self.gauss_next = None
+
+    def seed(self, a: Any = None, version: int = 2) -> None:
+        """Initialize internal state from hashable object.
+
+        None or no argument seeds from current time or from an operating
+        system specific randomness source if available.
+
+        For version 2 (the default), all of the bits are used if *a *is a str,
+        bytes, or bytearray.  For version 1, the hash() of *a* is used instead.
+
+        If *a* is an int, all bits are used.
+
+        """
+
+        if a is None:
+            try:
+                a = int.from_bytes(_urandom(32), 'big')
+            except NotImplementedError:
+                import time
+                a = int(time.time() * 256) # use fractional seconds
+
+        if version == 2:
+            if isinstance(a, (str, bytes, bytearray)):
+                if isinstance(a, str):
+                    a = a.encode()
+                a += _sha512(a).digest()
+                a = int.from_bytes(a, 'big')
+
+        super().seed(a)
+        self.gauss_next = None
+
+    def getstate(self) -> tuple:
+        """Return internal state; can be passed to setstate() later."""
+        return self.VERSION, super().getstate(), self.gauss_next
+
    def setstate(self, state: tuple) -> None:
        """Restore internal state from object returned by getstate()."""
        version = state[0]
        if version == 3:
            version, internalstate, self.gauss_next = state
            super().setstate(internalstate)
        elif version == 2:
            version, internalstate, self.gauss_next = state
            # In version 2, the state was saved as signed ints, which causes
            #   inconsistencies between 32/64-bit systems. The state is
            #   really unsigned 32-bit ints, so we convert negative ints from
            #   version 2 to positive longs for version 3.
            try:
                internalstate = tuple(x % (2**32) for x in internalstate)
            except ValueError as e:
                # NOTE(review): the original ValueError is discarded here and
                # surfaced as a bare TypeError — confirm intended.
                raise TypeError()
            super().setstate(internalstate)
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))
+
+## ---- Methods below this point do not need to be overridden when
+## ---- subclassing for the purpose of using a different core generator.
+
+## -------------------- pickle support  -------------------
+
    def __getstate__(self) -> object: # for pickle
        # Delegate to getstate() so pickling captures the full state triple.
        return self.getstate()
+
    def __setstate__(self, state: Any) -> None:  # for pickle
        # Restore via setstate(), which validates the state version.
        self.setstate(state)
+
    def __reduce__(self) -> object:
        # Recreate by calling the class with no args, then restoring the
        # captured state through __setstate__.
        return self.__class__, (), self.getstate()
+
+## -------------------- integer methods  -------------------
+
    def randrange(self, start: SupportsInt, stop: SupportsInt = None,
                  step: int = 1, int: Callable[[SupportsInt],
                                               int] = int) -> int:
        """Choose a random item from range(start, stop[, step]).

        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.

        Do not supply the 'int' argument.
        """

        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking.
        # ('int' is pre-bound as a default argument purely as a speed
        # hack: a local lookup is faster than the builtin lookup.)
        istart = int(start)
        if istart != start:
            raise ValueError("non-integer arg 1 for randrange()")
        if stop is None:
            # One-argument form: randrange(stop).
            if istart > 0:
                return self._randbelow(istart)
            raise ValueError("empty range for randrange()")

        # stop argument supplied.
        istop = int(stop)
        if istop != stop:
            raise ValueError("non-integer stop for randrange()")
        width = istop - istart
        if step == 1 and width > 0:
            return istart + self._randbelow(width)
        if step == 1:
            raise ValueError("empty range for randrange() (%d,%d, %d)" % (istart, istop, width))

        # Non-unit step argument supplied.
        istep = int(step)
        if istep != step:
            raise ValueError("non-integer step for randrange()")
        if istep > 0:
            # Number of values in range(istart, istop, istep).
            n = (width + istep - 1) // istep
        elif istep < 0:
            n = (width + istep + 1) // istep
        else:
            raise ValueError("zero step for randrange()")

        if n <= 0:
            raise ValueError("empty range for randrange()")

        return istart + istep*self._randbelow(n)
+
+    def randint(self, a: int, b: int) -> int:
+        """Return random integer in range [a, b], including both end points.
+        """
+
+        return self.randrange(a, b+1)
+
    def _randbelow(self, n: int, int: Callable[[float], int] = int,
                   maxsize: int = 1<<BPF,
                   type: Callable[[object], type] = type,
                   Method: type = _MethodType,
                   BuiltinMethod: type = _BuiltinMethodType) -> int:
        "Return a random int in the range [0,n).  Raises ValueError if n==0."
        # The extra default arguments (int, maxsize, type, Method,
        # BuiltinMethod) are a speed hack that pre-binds globals/builtins
        # as fast locals; callers should never supply them.

        getrandbits = self.getrandbits
        # Only call self.getrandbits if the original random() builtin method
        # has not been overridden or if a new getrandbits() was supplied.
        if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
            k = n.bit_length()  # don't use (n-1) here because n can be 1
            r = getrandbits(k)          # 0 <= r < 2**k
            while r >= n:
                # Rejection sampling keeps the result exactly uniform.
                r = getrandbits(k)
            return r
        # There's an overriden random() method but no new getrandbits() method,
        # so we can only use random() from here.
        random = self.random
        if n >= maxsize:
            _warn("Underlying random() generator does not supply \n"
                "enough bits to choose from a population range this large.\n"
                "To remove the range limitation, add a getrandbits() method.")
            return int(random() * n)
        rem = maxsize % n
        limit = (maxsize - rem) / maxsize   # int(limit * maxsize) % n == 0
        s = random()
        while s >= limit:
            # Reject the tail that would bias the modulo reduction below.
            s = random()
        return int(s*maxsize) % n
+
+## -------------------- sequence methods  -------------------
+
+    def choice(self, seq: Sequence[T]) -> T:
+        """Choose a random element from a non-empty sequence."""
+        try:
+            i = self._randbelow(len(seq))
+        except ValueError:
+            raise IndexError('Cannot choose from an empty sequence')
+        return seq[i]
+
+    def shuffle(self, x: List[T],
+                random: Callable[[], float] = None,
+                int: Callable[[float], int] = int) -> None:
+        """x, random=random.random -> shuffle list x in place; return None.
+
+        Optional arg random is a 0-argument function returning a random
+        float in [0.0, 1.0); by default, the standard random.random.
+        """
+
+        randbelow = self._randbelow
+        for i in reversed(range(1, len(x))):
+            # pick an element in x[:i+1] with which to exchange x[i]
+            j = randbelow(i+1) if random is None else int(random() * (i+1))
+            x[i], x[j] = x[j], x[i]
+
    def sample(self, population: Union[_Set[T], _Sequence[T]], k: int) -> List[T]:
        """Chooses k unique random elements from a population sequence or set.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        To choose a sample in a range of integers, use range as an argument.
        This is especially fast and space efficient for sampling from a
        large population:   sample(range(10000000), 60)
        """

        # Sampling without replacement entails tracking either potential
        # selections (the pool) in a list or previous selections in a set.

        # When the number of selections is small compared to the
        # population, then tracking selections is efficient, requiring
        # only a small set and an occasional reselection.  For
        # a larger number of selections, the pool tracking method is
        # preferred since the list takes less space than the
        # set and it doesn't suffer from frequent reselections.

        if isinstance(population, _Set):
            population = list(population)
        if not isinstance(population, _Sequence):
            raise TypeError("Population must be a sequence or set.  For dicts, use list(d).")
        randbelow = self._randbelow
        n = len(population)
        if not (0 <= k and k <= n):
            raise ValueError("Sample larger than population")
        result = [cast(T, None)] * k  # pre-sized output, filled by index
        setsize = 21        # size of a small set minus size of an empty list
        if k > 5:
            setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
        if n <= setsize:
            # An n-length list is smaller than a k-length set
            pool = list(population)
            for i in range(k):         # invariant:  non-selected at [0,n-i)
                j = randbelow(n-i)
                result[i] = pool[j]
                pool[j] = pool[n-i-1]   # move non-selected item into vacancy
        else:
            selected = set()  # type: Set[int]
            selected_add = selected.add
            for i in range(k):
                j = randbelow(n)
                while j in selected:
                    # Index already used: reselect until unique.
                    j = randbelow(n)
                selected_add(j)
                result[i] = population[j]
        return result
+
+## -------------------- real-valued distributions  -------------------
+
+## -------------------- uniform distribution -------------------
+
+    def uniform(self, a: float, b: float) -> float:
+        "Get a random number in the range [a, b) or [a, b] depending on rounding."
+        return a + (b-a) * self.random()
+
+## -------------------- triangular --------------------
+
+    def triangular(self, low: float = 0.0, high: float = 1.0,
+                   mode: float = None) -> float:
+        """Triangular distribution.
+
+        Continuous distribution bounded by given lower and upper limits,
+        and having a given mode value in-between.
+
+        http://en.wikipedia.org/wiki/Triangular_distribution
+
+        """
+        u = self.random()
+        c = 0.5 if mode is None else (mode - low) / (high - low)
+        if u > c:
+            u = 1.0 - u
+            c = 1.0 - c
+            low, high = high, low
+        return low + (high - low) * (u * c) ** 0.5
+
+## -------------------- normal distribution --------------------
+
    def normalvariate(self, mu: float, sigma: float) -> float:
        """Normal distribution.

        mu is the mean, and sigma is the standard deviation.

        """
        # mu = mean, sigma = standard deviation

        # Uses Kinderman and Monahan method. Reference: Kinderman,
        # A.J. and Monahan, J.F., "Computer generation of random
        # variables using the ratio of uniform deviates", ACM Trans
        # Math Software, 3, (1977), pp257-260.

        random = self.random
        while 1:
            # Rejection loop: draw candidates until one passes the test.
            u1 = random()
            u2 = 1.0 - random()
            z = NV_MAGICCONST*(u1-0.5)/u2
            zz = z*z/4.0
            if zz <= -_log(u2):
                break
        return mu + z*sigma
+
+## -------------------- lognormal distribution --------------------
+
+    def lognormvariate(self, mu: float, sigma: float) -> float:
+        """Log normal distribution.
+
+        If you take the natural logarithm of this distribution, you'll get a
+        normal distribution with mean mu and standard deviation sigma.
+        mu can have any value, and sigma must be greater than zero.
+
+        """
+        return _exp(self.normalvariate(mu, sigma))
+
+## -------------------- exponential distribution --------------------
+
+    def expovariate(self, lambd: float) -> float:
+        """Exponential distribution.
+
+        lambd is 1.0 divided by the desired mean.  It should be
+        nonzero.  (The parameter would be called "lambda", but that is
+        a reserved word in Python.)  Returned values range from 0 to
+        positive infinity if lambd is positive, and from negative
+        infinity to 0 if lambd is negative.
+
+        """
+        # lambd: rate lambd = 1/mean
+        # ('lambda' is a Python reserved word)
+
+        # we use 1-random() instead of random() to preclude the
+        # possibility of taking the log of zero.
+        return -_log(1.0 - self.random())/lambd
+
+## -------------------- von Mises distribution --------------------
+
    def vonmisesvariate(self, mu: float, kappa: float) -> float:
        """Circular data distribution.

        mu is the mean angle, expressed in radians between 0 and 2*pi, and
        kappa is the concentration parameter, which must be greater than or
        equal to zero.  If kappa is equal to zero, this distribution reduces
        to a uniform random angle over the range 0 to 2*pi.

        """
        # mu:    mean angle (in radians between 0 and 2*pi)
        # kappa: concentration parameter kappa (>= 0)
        # if kappa = 0 generate uniform random angle

        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.

        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.

        random = self.random
        if kappa <= 1e-6:
            # Effectively zero concentration: uniform angle.
            return TWOPI * random()

        a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
        b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
        r = (1.0 + b * b)/(2.0 * b)

        while 1:
            # Rejection loop: draw candidates until one is accepted.
            u1 = random()

            z = _cos(_pi * u1)
            f = (1.0 + r * z)/(r + z)
            c = kappa * (r - f)

            u2 = random()

            if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c):
                break

        # Random sign: place the angle on either side of the mean.
        u3 = random()
        if u3 > 0.5:
            theta = (mu % TWOPI) + _acos(f)
        else:
            theta = (mu % TWOPI) - _acos(f)

        return theta
+
+## -------------------- gamma distribution --------------------
+
+    def gammavariate(self, alpha: float, beta: float) -> float:
+        """Gamma distribution.  Not the gamma function!
+
+        Conditions on the parameters are alpha > 0 and beta > 0.
+
+        The probability distribution function is:
+
+                    x ** (alpha - 1) * math.exp(-x / beta)
+          pdf(x) =  --------------------------------------
+                      math.gamma(alpha) * beta ** alpha
+
+        Raises ValueError if alpha or beta is not positive.
+        """
+
+        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
+
+        # Warning: a few older sources define the gamma distribution in terms
+        # of alpha > -1.0
+        if alpha <= 0.0 or beta <= 0.0:
+            raise ValueError('gammavariate: alpha and beta must be > 0.0')
+
+        # Bind the bound method once; all three branches call it in loops.
+        random = self.random
+        if alpha > 1.0:
+
+            # Uses R.C.H. Cheng, "The generation of Gamma
+            # variables with non-integral shape parameters",
+            # Applied Statistics, (1977), 26, No. 1, p71-74
+
+            ainv = _sqrt(2.0 * alpha - 1.0)
+            bbb = alpha - LOG4
+            ccc = alpha + ainv
+
+            while 1:
+                u1 = random()
+                # Reject u1 too close to 0 or 1: _log(u1/(1-u1)) below
+                # would blow up or lose precision at the extremes.
+                if not (1e-7 < u1 and u1 < .9999999):
+                    continue
+                u2 = 1.0 - random()
+                v = _log(u1/(1.0-u1))/ainv
+                x = alpha*_exp(v)
+                z = u1*u1*u2
+                r = bbb+ccc*v-x
+                # Cheap squeeze test first, exact log test second.
+                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
+                    return x * beta
+
+        elif alpha == 1.0:
+            # expovariate(1): gamma(1, beta) is the exponential distribution.
+            u = random()
+            while u <= 1e-7:
+                u = random()
+            return -_log(u) * beta
+
+        else:   # alpha is between 0 and 1 (exclusive)
+
+            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
+
+            while 1:
+                u = random()
+                b = (_e + alpha)/_e
+                p = b*u
+                if p <= 1.0:
+                    x = p ** (1.0/alpha)
+                else:
+                    x = -_log((b-p)/alpha)
+                u1 = random()
+                # Accept with the density-ratio test matching the branch
+                # that produced x.
+                if p > 1.0:
+                    if u1 <= x ** (alpha - 1.0):
+                        break
+                elif u1 <= _exp(-x):
+                    break
+            return x * beta
+
+## -------------------- Gauss (faster alternative) --------------------
+
+    def gauss(self, mu: float, sigma: float) -> float:
+        """Gaussian distribution.
+
+        mu is the mean, and sigma is the standard deviation.  This is
+        slightly faster than the normalvariate() function.
+
+        Not thread-safe without a lock around calls.
+
+        """
+
+        # When x and y are two variables from [0, 1), uniformly
+        # distributed, then
+        #
+        #    cos(2*pi*x)*sqrt(-2*log(1-y))
+        #    sin(2*pi*x)*sqrt(-2*log(1-y))
+        #
+        # are two *independent* variables with normal distribution
+        # (mu = 0, sigma = 1).
+        # (Lambert Meertens)
+        # (corrected version; bug discovered by Mike Miller, fixed by LM)
+
+        # Multithreading note: When two threads call this function
+        # simultaneously, it is possible that they will receive the
+        # same return value.  The window is very small though.  To
+        # avoid this, you have to use a lock around all calls.  (I
+        # didn't want to slow this down in the serial case by using a
+        # lock here.)
+
+        random = self.random
+        # The transform above yields standard normals in pairs; the
+        # second one is cached in self.gauss_next for the next call.
+        z = self.gauss_next
+        self.gauss_next = None
+        if z is None:
+            x2pi = random() * TWOPI
+            # 1 - random() keeps the argument of _log strictly positive.
+            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
+            z = _cos(x2pi) * g2rad
+            self.gauss_next = _sin(x2pi) * g2rad
+
+        # Scale and shift the standard normal deviate.
+        return mu + z*sigma
+
+## -------------------- beta --------------------
+## See
+## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
+## for Ivan Frohne's insightful analysis of why the original implementation:
+##
+##    def betavariate(self, alpha, beta):
+##        # Discrete Event Simulation in C, pp 87-88.
+##
+##        y = self.expovariate(alpha)
+##        z = self.expovariate(1.0/beta)
+##        return z/(y+z)
+##
+## was dead wrong, and how it probably got that way.
+
+    def betavariate(self, alpha: float, beta: float) -> 'float':
+        """Beta distribution.
+
+        Conditions on the parameters are alpha > 0 and beta > 0.
+        Returned values range between 0 and 1.
+
+        """
+
+        # This version due to Janne Sinkkonen, and matches all the std
+        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
+        y = self.gammavariate(alpha, 1.)
+        if y == 0:
+            return 0.0
+        else:
+            return y / (y + self.gammavariate(beta, 1.))
+
+## -------------------- Pareto --------------------
+
+    def paretovariate(self, alpha: float) -> float:
+        """Pareto distribution.  alpha is the shape parameter."""
+        # Jain, pg. 495
+
+        u = 1.0 - self.random()
+        return 1.0 / u ** (1.0/alpha)
+
+## -------------------- Weibull --------------------
+
+    def weibullvariate(self, alpha: float, beta: float) -> float:
+        """Weibull distribution.
+
+        alpha is the scale parameter and beta is the shape parameter.
+
+        """
+        # Jain, pg. 499; bug fix courtesy Bill Arms
+
+        u = 1.0 - self.random()
+        return alpha * (-_log(u)) ** (1.0/beta)
+
+## --------------- Operating System Random Source  ------------------
+
+class SystemRandom(Random):
+    """Alternate random number generator using sources provided
+    by the operating system (such as /dev/urandom on Unix or
+    CryptGenRandom on Windows).
+
+     Not available on all systems (see os.urandom() for details).
+    """
+
+    def random(self) -> float:
+        """Get the next random number in the range [0.0, 1.0)."""
+        # 7 bytes = 56 random bits; discard the low 3 to keep 53, the
+        # precision of a C double.  RECIP_BPF (module-level, presumably
+        # 2**-53 -- defined outside this excerpt) scales into [0.0, 1.0).
+        return (int.from_bytes(_urandom(7), 'big') >> 3) * RECIP_BPF
+
+    def getrandbits(self, k: int) -> int:
+        """getrandbits(k) -> x.  Generates a long int with k random bits."""
+        if k <= 0:
+            raise ValueError('number of bits must be greater than zero')
+        if k != int(k):
+            raise TypeError('number of bits should be an integer')
+        numbytes = (k + 7) // 8                       # bits / 8 and rounded up
+        x = int.from_bytes(_urandom(numbytes), 'big')
+        return x >> (numbytes * 8 - k)                # trim excess bits
+
+    def seed(self, a: object = None, version: int = None) -> None:
+        "Stub method.  Not used for a system random number generator."
+        return
+
+    def _notimplemented(self, *args: Any, **kwds: Any) -> Any:
+        "Method should not be called for a system random number generator."
+        raise NotImplementedError('System entropy source does not have state.')
+    # OS entropy has no reproducible state, so saving/restoring state is
+    # explicitly unsupported.
+    getstate = setstate = _notimplemented
+
+# Create one instance, seeded from current time, and export its methods
+# as module-level functions.  The functions share state across all uses
+# (both in the user's code and in the Python libraries), but that's fine
+# for most programs and is easier for the casual user than making them
+# instantiate their own Random() instance.
+
+_inst = Random()
+seed = _inst.seed
+random = _inst.random
+uniform = _inst.uniform
+triangular = _inst.triangular
+randint = _inst.randint
+choice = _inst.choice
+randrange = _inst.randrange
+sample = _inst.sample
+shuffle = _inst.shuffle
+normalvariate = _inst.normalvariate
+lognormvariate = _inst.lognormvariate
+expovariate = _inst.expovariate
+vonmisesvariate = _inst.vonmisesvariate
+gammavariate = _inst.gammavariate
+gauss = _inst.gauss
+betavariate = _inst.betavariate
+paretovariate = _inst.paretovariate
+weibullvariate = _inst.weibullvariate
+getstate = _inst.getstate
+setstate = _inst.setstate
+getrandbits = _inst.getrandbits
+
+## -------------------- test program --------------------
+
+def _test_generator(n: int, func: Any, args: tuple) -> None:
+    import time
+    print(n, 'times', func.__name__)
+    total = 0.0
+    sqsum = 0.0
+    smallest = 1e10
+    largest = -1e10
+    t0 = time.time()
+    for i in range(n):
+        x = func(*args) # type: float
+        total += x
+        sqsum = sqsum + x*x
+        smallest = min(x, smallest)
+        largest = max(x, largest)
+    t1 = time.time()
+    print(round(t1-t0, 3), 'sec,', end=' ')
+    avg = total/n
+    stddev = _sqrt(sqsum/n - avg*avg)
+    print('avg %g, stddev %g, min %g, max %g' % \
+              (avg, stddev, smallest, largest))
+
+
+def _test(N: int = 2000) -> None:
+    # Smoke test: draw N samples from each distribution and print the
+    # timing/summary line for manual inspection.
+    _test_generator(N, random, ())
+    _test_generator(N, normalvariate, (0.0, 1.0))
+    _test_generator(N, lognormvariate, (0.0, 1.0))
+    _test_generator(N, vonmisesvariate, (0.0, 1.0))
+    # gammavariate is exercised across all three alpha regimes
+    # (alpha < 1, alpha == 1, alpha > 1).
+    _test_generator(N, gammavariate, (0.01, 1.0))
+    _test_generator(N, gammavariate, (0.1, 1.0))
+    _test_generator(N, gammavariate, (0.1, 2.0))
+    _test_generator(N, gammavariate, (0.5, 1.0))
+    _test_generator(N, gammavariate, (0.9, 1.0))
+    _test_generator(N, gammavariate, (1.0, 1.0))
+    _test_generator(N, gammavariate, (2.0, 1.0))
+    _test_generator(N, gammavariate, (20.0, 1.0))
+    _test_generator(N, gammavariate, (200.0, 1.0))
+    _test_generator(N, gauss, (0.0, 1.0))
+    _test_generator(N, betavariate, (3.0, 3.0))
+    _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0))
+
+if __name__ == '__main__':
+    _test()
diff --git a/test-data/stdlib-samples/3.2/shutil.py b/test-data/stdlib-samples/3.2/shutil.py
new file mode 100644
index 0000000..7204a4d
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/shutil.py
@@ -0,0 +1,790 @@
+"""Utility functions for copying and archiving files and directory trees.
+
+XXX The functions here don't copy the resource fork or other metadata on Mac.
+
+"""
+
+import os
+import sys
+import stat
+from os.path import abspath
+import fnmatch
+import collections
+import errno
+import tarfile
+import builtins
+
+from typing import (
+    Any, AnyStr, IO, List, Iterable, Callable, Tuple, Dict, Sequence, cast
+)
+from types import TracebackType
+
+try:
+    import bz2
+    _BZ2_SUPPORTED = True
+except ImportError:
+    _BZ2_SUPPORTED = False
+
+try:
+    from pwd import getpwnam as _getpwnam
+    getpwnam = _getpwnam
+except ImportError:
+    getpwnam = None
+
+try:
+    from grp import getgrnam as _getgrnam
+    getgrnam = _getgrnam
+except ImportError:
+    getgrnam = None
+
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+           "copytree", "move", "rmtree", "Error", "SpecialFileError",
+           "ExecError", "make_archive", "get_archive_formats",
+           "register_archive_format", "unregister_archive_format",
+           "get_unpack_formats", "register_unpack_format",
+           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
+
+class Error(EnvironmentError):
+    """Raised by copytree()/move() to report one or more failures
+    encountered while copying a tree (args[0] is a list of
+    (src, dst, reason) tuples)."""
+    pass
+
+class SpecialFileError(EnvironmentError):
+    """Raised when trying to do a kind of operation (e.g. copying) which is
+    not supported on a special file (e.g. a named pipe)"""
+
+class ExecError(EnvironmentError):
+    """Raised when a command could not be executed"""
+
+class ReadError(EnvironmentError):
+    """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+    """Raised when a registry operation with the archiving
+    and unpacking registries fails"""
+
+
+try:
+    _WindowsError = WindowsError # type: type
+except NameError:
+    # Non-Windows platforms have no WindowsError builtin; the None
+    # sentinel is checked with `is not None` before any isinstance()
+    # test against it (see copytree()).
+    _WindowsError = None
+
+
+# Function aliases to be patched in test cases
+rename = os.rename
+open = builtins.open
+
+
def copyfileobj(fsrc: IO[AnyStr], fdst: IO[AnyStr],
                length: int = 16*1024) -> None:
    """copy data from file-like object fsrc to file-like object fdst"""
    # Transfer in chunks of `length` units; an empty read signals EOF.
    while True:
        chunk = fsrc.read(length)
        if not chunk:
            return
        fdst.write(chunk)
+
+def _samefile(src: str, dst: str) -> bool:
+    # Macintosh, Unix.
+    if hasattr(os.path, 'samefile'):
+        try:
+            return os.path.samefile(src, dst)
+        except OSError:
+            return False
+
+    # All other platforms: check for same pathname.
+    return (os.path.normcase(os.path.abspath(src)) ==
+            os.path.normcase(os.path.abspath(dst)))
+
def copyfile(src: str, dst: str) -> None:
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    # Refuse to copy named pipes (FIFOs): opening one for reading could
    # block forever.  Paths that don't exist yet are fine here.
    for path in (src, dst):
        try:
            st = os.stat(path)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % path)

    with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
        copyfileobj(fsrc, fdst)
+
def copymode(src: str, dst: str) -> None:
    """Copy mode bits from src to dst"""
    # Silently do nothing on platforms without os.chmod.
    if not hasattr(os, 'chmod'):
        return
    mode = stat.S_IMODE(os.stat(src).st_mode)
    os.chmod(dst, mode)
+
def copystat(src: str, dst: str) -> None:
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    # chflags and st_flags are both optional; EOPNOTSUPP just means the
    # destination filesystem cannot store the flags, which is ignorable.
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as err:
            if (not hasattr(errno, 'EOPNOTSUPP') or
                    err.errno != errno.EOPNOTSUPP):
                raise
+
def copy(src: str, dst: str) -> None:
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    # Copying into a directory means copying to a same-named file inside it.
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copymode(src, target)
+
def copy2(src: str, dst: str) -> None:
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    # Same as copy(), but preserves full stat info instead of mode only.
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copystat(src, target)
+
def ignore_patterns(*patterns: str) -> Callable[[str, List[str]],
                                                Iterable[str]]:
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path: str, names: List[str]) -> Iterable[str]:
        # Union of every name matching any pattern.  `path` is unused but
        # required by the copytree() ignore-callback signature.
        matched = set()
        for pattern in patterns:
            matched.update(fnmatch.filter(names, pattern))
        return matched
    return _ignore_patterns
+
+def copytree(src: str, dst: str, symlinks: bool = False,
+             ignore: Callable[[str, List[str]], Iterable[str]] = None,
+             copy_function: Callable[[str, str], None] = copy2,
+             ignore_dangling_symlinks: bool = False) -> None:
+    """Recursively copy a directory tree.
+
+    The destination directory must not already exist.
+    If exception(s) occur, an Error is raised with a list of reasons.
+
+    If the optional symlinks flag is true, symbolic links in the
+    source tree result in symbolic links in the destination tree; if
+    it is false, the contents of the files pointed to by symbolic
+    links are copied. If the file pointed by the symlink doesn't
+    exist, an exception will be added in the list of errors raised in
+    an Error exception at the end of the copy process.
+
+    You can set the optional ignore_dangling_symlinks flag to true if you
+    want to silence this exception. Notice that this has no effect on
+    platforms that don't support os.symlink.
+
+    The optional ignore argument is a callable. If given, it
+    is called with the `src` parameter, which is the directory
+    being visited by copytree(), and `names` which is the list of
+    `src` contents, as returned by os.listdir():
+
+        callable(src, names) -> ignored_names
+
+    Since copytree() is called recursively, the callable will be
+    called once for each directory that is copied. It returns a
+    list of names relative to the `src` directory that should
+    not be copied.
+
+    The optional copy_function argument is a callable that will be used
+    to copy each file. It will be called with the source path and the
+    destination path as arguments. By default, copy2() is used, but any
+    function that supports the same signature (like copy()) can be used.
+
+    """
+    names = os.listdir(src)
+    if ignore is not None:
+        ignored_names = ignore(src, names)
+    else:
+        ignored_names = set()
+
+    # Destination must not exist yet: makedirs raises if it does.
+    os.makedirs(dst)
+    # Per-file failures are collected and reported together in a single
+    # Error at the end, so one bad file does not abort the whole copy.
+    errors = []  # type: List[Tuple[str, str, str]]
+    for name in names:
+        if name in ignored_names:
+            continue
+        srcname = os.path.join(src, name)
+        dstname = os.path.join(dst, name)
+        try:
+            if os.path.islink(srcname):
+                linkto = os.readlink(srcname)
+                if symlinks:
+                    os.symlink(linkto, dstname)
+                else:
+                    # ignore dangling symlink if the flag is on
+                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
+                        continue
+                    # otherwise let the copy occurs. copy2 will raise an error
+                    copy_function(srcname, dstname)
+            elif os.path.isdir(srcname):
+                # Recurse into subdirectories with the same options.
+                copytree(srcname, dstname, symlinks, ignore, copy_function)
+            else:
+                # Will raise a SpecialFileError for unsupported file types
+                copy_function(srcname, dstname)
+        # catch the Error from the recursive copytree so that we can
+        # continue with other files
+        except Error as err:
+            # err.args[0] is the nested call's list of error tuples.
+            errors.extend(err.args[0])
+        except EnvironmentError as why:
+            errors.append((srcname, dstname, str(why)))
+    try:
+        copystat(src, dst)
+    except OSError as why:
+        if _WindowsError is not None and isinstance(why, _WindowsError):
+            # Copying file access times may fail on Windows
+            pass
+        else:
+            errors.append((src, dst, str(why)))
+    if errors:
+        raise Error(errors)
+
+def rmtree(path: str, ignore_errors: bool = False,
+           onerror: Callable[[Any, str, Tuple[type, BaseException, TracebackType]],
+                              None] = None) -> None:
+    """Recursively delete a directory tree.
+
+    If ignore_errors is set, errors are ignored; otherwise, if onerror
+    is set, it is called to handle the error with arguments (func,
+    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
+    path is the argument to that function that caused it to fail; and
+    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
+    is false and onerror is None, an exception is raised.
+
+    """
+    # Normalize the three error policies into a single onerror callback:
+    # swallow everything, re-raise, or the user-supplied handler.
+    if ignore_errors:
+        def _onerror(x: Any, y: str,
+                     z: Tuple[type, BaseException, TracebackType]) -> None:
+            pass
+        onerror = _onerror
+    elif onerror is None:
+        def __onerror(x: Any, y: str,
+                      z: Tuple[type, BaseException, TracebackType]) -> None:
+            # Bare raise re-raises the exception active in the caller's
+            # except block.
+            raise
+        onerror = __onerror
+    try:
+        if os.path.islink(path):
+            # symlinks to directories are forbidden, see bug #1669
+            raise OSError("Cannot call rmtree on a symbolic link")
+    except OSError:
+        onerror(os.path.islink, path, sys.exc_info())
+        # can't continue even if onerror hook returns
+        return
+    names = []  # type: List[str]
+    try:
+        names = os.listdir(path)
+    except os.error as err:
+        # (err is unused; the handler gets the details via sys.exc_info())
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            # Treat an unstat-able entry as a plain file below.
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error as err:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
+
+def _basename(path: str) -> str:
+    # A basename() variant which first strips the trailing slash, if present.
+    # Thus we always get the last component of the path, even for directories.
+    return os.path.basename(path.rstrip(os.path.sep))
+
+def move(src: str, dst: str) -> None:
+    """Recursively move a file or directory to another location. This is
+    similar to the Unix "mv" command.
+
+    If the destination is a directory or a symlink to a directory, the source
+    is moved inside the directory. The destination path must not already
+    exist.
+
+    If the destination already exists but is not a directory, it may be
+    overwritten depending on os.rename() semantics.
+
+    If the destination is on our current filesystem, then rename() is used.
+    Otherwise, src is copied to the destination and then removed.
+    A lot more could be done here...  A look at a mv.c shows a lot of
+    the issues this implementation glosses over.
+
+    """
+    real_dst = dst
+    if os.path.isdir(dst):
+        if _samefile(src, dst):
+            # We might be on a case insensitive filesystem,
+            # perform the rename anyway.
+            os.rename(src, dst)
+            return
+
+        real_dst = os.path.join(dst, _basename(src))
+        if os.path.exists(real_dst):
+            raise Error("Destination path '%s' already exists" % real_dst)
+    try:
+        os.rename(src, real_dst)
+    except OSError as exc:
+        # (exc is unused: any rename failure, typically a cross-device
+        # link error, falls back to copy-then-delete below.)
+        if os.path.isdir(src):
+            # NOTE(review): the containment check uses dst, not real_dst;
+            # this matches upstream 3.2 shutil -- verify intended.
+            if _destinsrc(src, dst):
+                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
+            copytree(src, real_dst, symlinks=True)
+            rmtree(src)
+        else:
+            copy2(src, real_dst)
+            os.unlink(src)
+
+def _destinsrc(src: str, dst: str) -> bool:
+    src = abspath(src)
+    dst = abspath(dst)
+    if not src.endswith(os.path.sep):
+        src += os.path.sep
+    if not dst.endswith(os.path.sep):
+        dst += os.path.sep
+    return dst.startswith(src)
+
+def _get_gid(name: str) -> int:
+    """Returns a gid, given a group name."""
+    if getgrnam is None or name is None:
+        return None
+    try:
+        result = getgrnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result.gr_gid
+    return None
+
+def _get_uid(name: str) -> int:
+    """Returns an uid, given a user name."""
+    if getpwnam is None or name is None:
+        return None
+    try:
+        result = getpwnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result.pw_uid
+    return None
+
+def _make_tarball(base_name: str, base_dir: str, compress: str = "gzip",
+                  verbose: bool = False, dry_run: bool = False,
+                  owner: str = None, group: str = None,
+                  logger: Any = None) -> str:
+    """Create a (possibly compressed) tar file from all the files under
+    'base_dir'.
+
+    'compress' must be "gzip" (the default), "bzip2", or None.
+
+    'owner' and 'group' can be used to define an owner and a group for the
+    archive that is being built. If not provided, the current owner and group
+    will be used.
+
+    The output tar file will be named 'base_name' +  ".tar", possibly plus
+    the appropriate compression extension (".gz", or ".bz2").
+
+    Returns the output filename.
+    """
+    tar_compression = {'gzip': 'gz', None: ''}
+    compress_ext = {'gzip': '.gz'}
+
+    if _BZ2_SUPPORTED:
+        tar_compression['bzip2'] = 'bz2'
+        compress_ext['bzip2'] = '.bz2'
+
+    # flags for compression program, each element of list will be an argument
+    if compress is not None and compress not in compress_ext.keys():
+        raise ValueError("bad value for 'compress', or compression format not "
+                         "supported : {0}".format(compress))
+
+    # compress=None gets no extension via the .get() default.
+    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+    archive_dir = os.path.dirname(archive_name)
+
+    if not os.path.exists(archive_dir):
+        if logger is not None:
+            logger.info("creating %s", archive_dir)
+        if not dry_run:
+            os.makedirs(archive_dir)
+
+    # creating the tarball
+    if logger is not None:
+        logger.info('Creating tar archive')
+
+    uid = _get_uid(owner)
+    gid = _get_gid(group)
+
+    # Filter applied to every member as it is added: rewrite ownership
+    # when an explicit owner/group was requested and resolvable.
+    def _set_uid_gid(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
+        if gid is not None:
+            tarinfo.gid = gid
+            tarinfo.gname = group
+        if uid is not None:
+            tarinfo.uid = uid
+            tarinfo.uname = owner
+        return tarinfo
+
+    if not dry_run:
+        # 'w|%s' opens a stream-mode tar for writing with the selected
+        # compression suffix ('', 'gz' or 'bz2').
+        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+        try:
+            tar.add(base_dir, filter=_set_uid_gid)
+        finally:
+            tar.close()
+
+    return archive_name
+
def _call_external_zip(base_dir: str, zip_filename: str, verbose: bool = False,
                       dry_run: bool = False) -> None:
    """Create zip_filename from base_dir by spawning the external 'zip'
    utility (fallback used when the zipfile module is unavailable)."""
    # XXX see if we want to keep an external call here
    zipoptions = "-r" if verbose else "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError(("unable to create zip file '%s': "
            "could neither import the 'zipfile' module nor "
            "find a standalone zip utility") % zip_filename)
+
+def _make_zipfile(base_name: str, base_dir: str, verbose: bool = False,
+                  dry_run: bool = False, logger: Any = None) -> str:
+    """Create a zip file from all the files under 'base_dir'.
+
+    The output zip file will be named 'base_name' + ".zip".  Uses either the
+    "zipfile" Python module (if available) or the InfoZIP "zip" utility
+    (if installed and found on the default search path).  If neither tool is
+    available, raises ExecError.  Returns the name of the output zip
+    file.
+    """
+    zip_filename = base_name + ".zip"
+    archive_dir = os.path.dirname(base_name)
+
+    if not os.path.exists(archive_dir):
+        if logger is not None:
+            logger.info("creating %s", archive_dir)
+        if not dry_run:
+            os.makedirs(archive_dir)
+
+    # If zipfile module is not available, try spawning an external 'zip'
+    # command.
+    try:
+        import zipfile
+    except ImportError:
+        zipfile = None
+
+    if zipfile is None:
+        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
+    else:
+        if logger is not None:
+            logger.info("creating '%s' and adding '%s' to it",
+                        zip_filename, base_dir)
+
+        if not dry_run:
+            zip = zipfile.ZipFile(zip_filename, "w",
+                                  compression=zipfile.ZIP_DEFLATED)
+
+            # Walk the tree and store each regular file under its
+            # on-disk (normalized) path; directories themselves are not
+            # stored as members.
+            for dirpath, dirnames, filenames in os.walk(base_dir):
+                for name in filenames:
+                    path = os.path.normpath(os.path.join(dirpath, name))
+                    if os.path.isfile(path):
+                        zip.write(path, path)
+                        if logger is not None:
+                            logger.info("adding '%s'", path)
+            zip.close()
+
+    return zip_filename
+
+# Registry mapping format name -> (creation function, default extra
+# (arg, value) pairs passed to it, human-readable description).
+_ARCHIVE_FORMATS = {
+    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
+    'zip':   (_make_zipfile, [],"ZIP file")
+    } # type: Dict[str, Tuple[Any, Sequence[Tuple[str, str]], str]]
+
+# bztar is only offered when the bz2 module imported successfully above.
+if _BZ2_SUPPORTED:
+    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+                                "bzip2'ed tar-file")
+
def get_archive_formats() -> List[Tuple[str, str]]:
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    # registry[2] is the human-readable description; sort by name.
    return sorted((name, registry[2])
                  for name, registry in _ARCHIVE_FORMATS.items())
+
def register_archive_format(name: str, function: Any,
                            extra_args: Sequence[Tuple[str, Any]] = None,
                            description: str = '') -> None:
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    # Every extra argument must itself be a (name, value) pair.
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(cast(tuple, element)) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
+
def unregister_archive_format(name: str) -> None:
    """Remove a previously registered archive format (KeyError if absent)."""
    _ARCHIVE_FORMATS.pop(name)
+
+def make_archive(base_name: str, format: str, root_dir: str = None,
+                 base_dir: str = None, verbose: bool = False,
+                 dry_run: bool = False, owner: str = None,
+                 group: str = None, logger: Any = None) -> str:
+    """Create an archive file (eg. zip or tar).
+
+    'base_name' is the name of the file to create, minus any format-specific
+    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
+    or "gztar".
+
+    'root_dir' is a directory that will be the root directory of the
+    archive; ie. we typically chdir into 'root_dir' before creating the
+    archive.  'base_dir' is the directory where we start archiving from;
+    ie. 'base_dir' will be the common prefix of all files and
+    directories in the archive.  'root_dir' and 'base_dir' both default
+    to the current directory.  Returns the name of the archive file.
+
+    'owner' and 'group' are used when creating a tar archive. By default,
+    uses the current owner and group.
+    """
+    # Remember the cwd: the archive is built from inside root_dir and we
+    # chdir back in the finally block below.
+    save_cwd = os.getcwd()
+    if root_dir is not None:
+        if logger is not None:
+            logger.debug("changing into '%s'", root_dir)
+        # Resolve base_name before the chdir so a relative output path is
+        # interpreted against the original cwd.
+        base_name = os.path.abspath(base_name)
+        if not dry_run:
+            os.chdir(root_dir)
+
+    if base_dir is None:
+        base_dir = os.curdir
+
+    kwargs = {'dry_run': dry_run, 'logger': logger}
+
+    try:
+        format_info = _ARCHIVE_FORMATS[format]
+    except KeyError:
+        raise ValueError("unknown archive format '%s'" % format)
+
+    func = format_info[0]
+    # Merge the format's default extra (arg, value) pairs into the call.
+    for arg, val in format_info[1]:
+        kwargs[arg] = val
+
+    # Ownership options only apply to tar-based archives.
+    if format != 'zip':
+        kwargs['owner'] = owner
+        kwargs['group'] = group
+
+    try:
+        filename = func(base_name, base_dir, **kwargs)
+    finally:
+        if root_dir is not None:
+            if logger is not None:
+                logger.debug("changing back to '%s'", save_cwd)
+            os.chdir(save_cwd)
+
+    return filename
+
+
+def get_unpack_formats() -> List[Tuple[str, List[str], str]]:
+    """Returns a list of supported formats for unpacking.
+
+    Each element of the returned sequence is a tuple
+    (name, extensions, description)
+    """
+    # _UNPACK_FORMATS values are (extensions, function, extra_args,
+    # description); only extensions and description are exposed here.
+    formats = [(name, info[0], info[3]) for name, info in
+               _UNPACK_FORMATS.items()]
+    formats.sort()
+    return formats
+
+def _check_unpack_options(extensions: List[str], function: Any,
+                          extra_args: Sequence[Tuple[str, Any]]) -> None:
+    """Checks what gets registered as an unpacker.
+
+    Raises RegistryError if any extension is already claimed by another
+    format, and TypeError if `function` is not callable.
+    """
+    # first make sure no other unpacker is registered for this extension
+    existing_extensions = {}  # type: Dict[str, str]
+    for name, info in _UNPACK_FORMATS.items():
+        for ext in info[0]:
+            existing_extensions[ext] = name
+
+    for extension in extensions:
+        if extension in existing_extensions:
+            msg = '%s is already registered for "%s"'
+            raise RegistryError(msg % (extension,
+                                       existing_extensions[extension]))
+
+    # NOTE(review): extra_args is accepted for signature symmetry with
+    # register_unpack_format() but is not validated here.
+    if not callable(function):
+        raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name: str, extensions: List[str], function: Any,
+                           extra_args: Sequence[Tuple[str, Any]] = None,
+                           description: str = '') -> None:
+    """Registers an unpack format.
+
+    `name` is the name of the format. `extensions` is a list of extensions
+    corresponding to the format.
+
+    `function` is the callable that will be
+    used to unpack archives. The callable will receive archives to unpack.
+    If it's unable to handle an archive, it needs to raise a ReadError
+    exception.
+
+    If provided, `extra_args` is a sequence of
+    (name, value) tuples that will be passed as arguments to the callable.
+    description can be provided to describe the format, and will be returned
+    by the get_unpack_formats() function.
+    """
+    # None sentinel avoids a shared mutable default argument.
+    if extra_args is None:
+        extra_args = []
+    _check_unpack_options(extensions, function, extra_args)
+    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name: str) -> None:
+    """Removes the unpack format from the registry."""
+    del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path: str) -> None:
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    # NOTE(review): check-then-create is racy; if another process creates
+    # the directory in between, makedirs() raises.  Harmless for the
+    # single-process unpack helpers below, but worth confirming.
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def _unpack_zipfile(filename: str, extract_dir: str) -> None:
+    """Unpack zip `filename` to `extract_dir`
+
+    Raises ReadError if zipfile is unavailable or `filename` is not a
+    valid zip archive.
+    """
+    try:
+        # presumably guards against builds where zipfile's compression
+        # support (zlib) is missing -- TODO confirm; zipfile itself
+        # imports zlib lazily.
+        import zipfile
+    except ImportError:
+        raise ReadError('zlib not supported, cannot unpack this archive.')
+
+    if not zipfile.is_zipfile(filename):
+        raise ReadError("%s is not a zip file" % filename)
+
+    zip = zipfile.ZipFile(filename)
+    try:
+        for info in zip.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            # (path-traversal guard: such members are silently skipped)
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            if not target:
+                continue
+
+            _ensure_directory(target)
+            # entries ending in '/' are directories; _ensure_directory
+            # above already created the needed path, so only write files
+            if not name.endswith('/'):
+                # file
+                data = zip.read(info.filename)
+                f = open(target,'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    # free the member's bytes promptly; archives can be big
+                    del data
+    finally:
+        zip.close()
+
+def _unpack_tarfile(filename: str, extract_dir: str) -> None:
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+    Raises ReadError if tarfile cannot open `filename`.
+    """
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise ReadError(
+            "%s is not a compressed or uncompressed tar file" % filename)
+    # NOTE(review): extractall() trusts member paths; unlike the zip
+    # helper above there is no absolute-path/'..' filtering here.
+    try:
+        tarobj.extractall(extract_dir)
+    finally:
+        tarobj.close()
+
+# Registry of unpack formats:
+#   name -> (extensions, function, extra_args, description)
+_UNPACK_FORMATS = {
+    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
+    } # type: Dict[str, Tuple[List[str], Any, Sequence[Tuple[str, Any]], str]]
+
+# bztar is registered only when the bz2 module imported successfully.
+if _BZ2_SUPPORTED:
+    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+                                "bzip2'ed tar-file")
+
+def _find_unpack_format(filename: str) -> str:
+    """Return the name of the registered unpack format whose extension
+    matches `filename`, or None when no extension matches.
+    """
+    for name, info in _UNPACK_FORMATS.items():
+        for extension in info[0]:
+            if filename.endswith(extension):
+                return name
+    # NOTE(review): returns None despite the plain `str` annotation
+    # (pre-strict-Optional typing style); callers must handle None.
+    return None
+
+def unpack_archive(filename: str, extract_dir: str = None,
+                   format: str = None) -> None:
+    """Unpack an archive.
+
+    `filename` is the name of the archive.
+
+    `extract_dir` is the name of the target directory, where the archive
+    is unpacked. If not provided, the current working directory is used.
+
+    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
+    other registered format. If not provided, unpack_archive will use the
+    filename extension and see if an unpacker was registered for that
+    extension.
+
+    In case none is found, a ValueError is raised.
+    """
+    if extract_dir is None:
+        extract_dir = os.getcwd()
+
+    if format is not None:
+        try:
+            format_info = _UNPACK_FORMATS[format]
+        except KeyError:
+            raise ValueError("Unknown unpack format '{0}'".format(format))
+
+        # format_info is (extensions, function, extra_args, description)
+        func = format_info[1]
+        func(filename, extract_dir, **dict(format_info[2]))
+    else:
+        # we need to look at the registered unpackers supported extensions
+        format = _find_unpack_format(filename)
+        if format is None:
+            # NOTE(review): the docstring promises ValueError here, but
+            # this path raises ReadError -- docs and code disagree.
+            raise ReadError("Unknown archive format '{0}'".format(filename))
+
+        func = _UNPACK_FORMATS[format][1]
+        kwargs = dict(_UNPACK_FORMATS[format][2])
+        func(filename, extract_dir, **kwargs)
diff --git a/test-data/stdlib-samples/3.2/subprocess.py b/test-data/stdlib-samples/3.2/subprocess.py
new file mode 100644
index 0000000..e286525
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/subprocess.py
@@ -0,0 +1,1703 @@
+# subprocess - Subprocesses with accessible I/O streams
+#
+# For more information about this module, see PEP 324.
+#
+# Copyright (c) 2003-2005 by Peter Astrand <astrand at lysator.liu.se>
+#
+# Licensed to PSF under a Contributor Agreement.
+# See http://www.python.org/2.4/license for licensing details.
+
+r"""subprocess - Subprocesses with accessible I/O streams
+
+This module allows you to spawn processes, connect to their
+input/output/error pipes, and obtain their return codes.  This module
+intends to replace several other, older modules and functions, like:
+
+os.system
+os.spawn*
+
+Information about how the subprocess module can be used to replace these
+modules and functions can be found below.
+
+
+
+Using the subprocess module
+===========================
+This module defines one class called Popen:
+
+class Popen(args, bufsize=0, executable=None,
+            stdin=None, stdout=None, stderr=None,
+            preexec_fn=None, close_fds=True, shell=False,
+            cwd=None, env=None, universal_newlines=False,
+            startupinfo=None, creationflags=0,
+            restore_signals=True, start_new_session=False, pass_fds=()):
+
+
+Arguments are:
+
+args should be a string, or a sequence of program arguments.  The
+program to execute is normally the first item in the args sequence or
+string, but can be explicitly set by using the executable argument.
+
+On POSIX, with shell=False (default): In this case, the Popen class
+uses os.execvp() to execute the child program.  args should normally
+be a sequence.  A string will be treated as a sequence with the string
+as the only item (the program to execute).
+
+On POSIX, with shell=True: If args is a string, it specifies the
+command string to execute through the shell.  If args is a sequence,
+the first item specifies the command string, and any additional items
+will be treated as additional shell arguments.
+
+On Windows: the Popen class uses CreateProcess() to execute the child
+program, which operates on strings.  If args is a sequence, it will be
+converted to a string using the list2cmdline method.  Please note that
+not all MS Windows applications interpret the command line the same
+way: The list2cmdline is designed for applications using the same
+rules as the MS C runtime.
+
+bufsize, if given, has the same meaning as the corresponding argument
+to the built-in open() function: 0 means unbuffered, 1 means line
+buffered, any other positive value means use a buffer of
+(approximately) that size.  A negative bufsize means to use the system
+default, which usually means fully buffered.  The default value for
+bufsize is 0 (unbuffered).
+
+stdin, stdout and stderr specify the executed programs' standard
+input, standard output and standard error file handles, respectively.
+Valid values are PIPE, an existing file descriptor (a positive
+integer), an existing file object, and None.  PIPE indicates that a
+new pipe to the child should be created.  With None, no redirection
+will occur; the child's file handles will be inherited from the
+parent.  Additionally, stderr can be STDOUT, which indicates that the
+stderr data from the applications should be captured into the same
+file handle as for stdout.
+
+On POSIX, if preexec_fn is set to a callable object, this object will be
+called in the child process just before the child is executed.  The use
+of preexec_fn is not thread safe, using it in the presence of threads
+could lead to a deadlock in the child process before the new executable
+is executed.
+
+If close_fds is true, all file descriptors except 0, 1 and 2 will be
+closed before the child process is executed.  The default for close_fds
+varies by platform:  Always true on POSIX.  True when stdin/stdout/stderr
+are None on Windows, false otherwise.
+
+pass_fds is an optional sequence of file descriptors to keep open between the
+parent and child.  Providing any pass_fds implicitly sets close_fds to true.
+
+if shell is true, the specified command will be executed through the
+shell.
+
+If cwd is not None, the current directory will be changed to cwd
+before the child is executed.
+
+On POSIX, if restore_signals is True all signals that Python sets to
+SIG_IGN are restored to SIG_DFL in the child process before the exec.
+Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals.  This
+parameter does nothing on Windows.
+
+On POSIX, if start_new_session is True, the setsid() system call will be made
+in the child process prior to executing the command.
+
+If env is not None, it defines the environment variables for the new
+process.
+
+If universal_newlines is true, the file objects stdout and stderr are
+opened as text files, but lines may be terminated by any of '\n',
+the Unix end-of-line convention, '\r', the old Macintosh convention or
+'\r\n', the Windows convention.  All of these external representations
+are seen as '\n' by the Python program.  Note: This feature is only
+available if Python is built with universal newline support (the
+default).  Also, the newlines attribute of the file objects stdout,
+stdin and stderr are not updated by the communicate() method.
+
+The startupinfo and creationflags, if given, will be passed to the
+underlying CreateProcess() function.  They can specify things such as
+appearance of the main window and priority for the new process.
+(Windows only)
+
+
+This module also defines some shortcut functions:
+
+call(*popenargs, **kwargs):
+    Run command with arguments.  Wait for command to complete, then
+    return the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> retcode = subprocess.call(["ls", "-l"])
+
+check_call(*popenargs, **kwargs):
+    Run command with arguments.  Wait for command to complete.  If the
+    exit code was zero then return, otherwise raise
+    CalledProcessError.  The CalledProcessError object will have the
+    return code in the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> subprocess.check_call(["ls", "-l"])
+    0
+
+getstatusoutput(cmd):
+    Return (status, output) of executing cmd in a shell.
+
+    Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
+    (status, output).  cmd is actually run as '{ cmd ; } 2>&1', so that the
+    returned output will contain output or error messages. A trailing newline
+    is stripped from the output. The exit status for the command can be
+    interpreted according to the rules for the C function wait().  Example:
+
+    >>> subprocess.getstatusoutput('ls /bin/ls')
+    (0, '/bin/ls')
+    >>> subprocess.getstatusoutput('cat /bin/junk')
+    (256, 'cat: /bin/junk: No such file or directory')
+    >>> subprocess.getstatusoutput('/bin/junk')
+    (256, 'sh: /bin/junk: not found')
+
+getoutput(cmd):
+    Return output (stdout or stderr) of executing cmd in a shell.
+
+    Like getstatusoutput(), except the exit status is ignored and the return
+    value is a string containing the command's output.  Example:
+
+    >>> subprocess.getoutput('ls /bin/ls')
+    '/bin/ls'
+
+check_output(*popenargs, **kwargs):
+    Run command with arguments and return its output as a byte string.
+
+    If the exit code was non-zero it raises a CalledProcessError.  The
+    CalledProcessError object will have the return code in the returncode
+    attribute and output in the output attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> output = subprocess.check_output(["ls", "-l", "/dev/null"])
+
+
+Exceptions
+----------
+Exceptions raised in the child process, before the new program has
+started to execute, will be re-raised in the parent.  Additionally,
+the exception object will have one extra attribute called
+'child_traceback', which is a string containing traceback information
+from the child's point of view.
+
+The most common exception raised is OSError.  This occurs, for
+example, when trying to execute a non-existent file.  Applications
+should prepare for OSErrors.
+
+A ValueError will be raised if Popen is called with invalid arguments.
+
+check_call() and check_output() will raise CalledProcessError, if the
+called process returns a non-zero return code.
+
+
+Security
+--------
+Unlike some other popen functions, this implementation will never call
+/bin/sh implicitly.  This means that all characters, including shell
+metacharacters, can safely be passed to child processes.
+
+
+Popen objects
+=============
+Instances of the Popen class have the following methods:
+
+poll()
+    Check if child process has terminated.  Returns returncode
+    attribute.
+
+wait()
+    Wait for child process to terminate.  Returns returncode attribute.
+
+communicate(input=None)
+    Interact with process: Send data to stdin.  Read data from stdout
+    and stderr, until end-of-file is reached.  Wait for process to
+    terminate.  The optional input argument should be a string to be
+    sent to the child process, or None, if no data should be sent to
+    the child.
+
+    communicate() returns a tuple (stdout, stderr).
+
+    Note: The data read is buffered in memory, so do not use this
+    method if the data size is large or unlimited.
+
+The following attributes are also available:
+
+stdin
+    If the stdin argument is PIPE, this attribute is a file object
+    that provides input to the child process.  Otherwise, it is None.
+
+stdout
+    If the stdout argument is PIPE, this attribute is a file object
+    that provides output from the child process.  Otherwise, it is
+    None.
+
+stderr
+    If the stderr argument is PIPE, this attribute is file object that
+    provides error output from the child process.  Otherwise, it is
+    None.
+
+pid
+    The process ID of the child process.
+
+returncode
+    The child return code.  A None value indicates that the process
+    hasn't terminated yet.  A negative value -N indicates that the
+    child was terminated by signal N (POSIX only).
+
+
+Replacing older functions with the subprocess module
+====================================================
+In this section, "a ==> b" means that b can be used as a replacement
+for a.
+
+Note: All functions in this section fail (more or less) silently if
+the executed program cannot be found; this module raises an OSError
+exception.
+
+In the following examples, we assume that the subprocess module is
+imported with "from subprocess import *".
+
+
+Replacing /bin/sh shell backquote
+---------------------------------
+output=`mycmd myarg`
+==>
+output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
+
+
+Replacing shell pipe line
+-------------------------
+output=`dmesg | grep hda`
+==>
+p1 = Popen(["dmesg"], stdout=PIPE)
+p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+output = p2.communicate()[0]
+
+
+Replacing os.system()
+---------------------
+sts = os.system("mycmd" + " myarg")
+==>
+p = Popen("mycmd" + " myarg", shell=True)
+pid, sts = os.waitpid(p.pid, 0)
+
+Note:
+
+* Calling the program through the shell is usually not required.
+
+* It's easier to look at the returncode attribute than the
+  exitstatus.
+
+A more real-world example would look like this:
+
+try:
+    retcode = call("mycmd" + " myarg", shell=True)
+    if retcode < 0:
+        print("Child was terminated by signal", -retcode, file=sys.stderr)
+    else:
+        print("Child returned", retcode, file=sys.stderr)
+except OSError as e:
+    print("Execution failed:", e, file=sys.stderr)
+
+
+Replacing os.spawn*
+-------------------
+P_NOWAIT example:
+
+pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+pid = Popen(["/bin/mycmd", "myarg"]).pid
+
+
+P_WAIT example:
+
+retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+retcode = call(["/bin/mycmd", "myarg"])
+
+
+Vector example:
+
+os.spawnvp(os.P_NOWAIT, path, args)
+==>
+Popen([path] + args[1:])
+
+
+Environment example:
+
+os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
+==>
+Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
+"""
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import io
+import os
+import traceback
+import gc
+import signal
+import builtins
+import warnings
+import errno
+
+from typing import (
+    Any, Tuple, List, Sequence, Callable, Mapping, cast, Set, Dict, IO,
+    TextIO, AnyStr
+)
+from types import TracebackType
+
+# Exception classes used by this module.
+class CalledProcessError(Exception):
+    """This exception is raised when a process run by check_call() or
+    check_output() returns a non-zero exit status.
+    The exit status will be stored in the returncode attribute;
+    check_output() will also store the output in the output attribute.
+    """
+    def __init__(self, returncode: int, cmd: str, output: Any = None) -> None:
+        self.returncode = returncode  # exit status of the failed command
+        self.cmd = cmd  # the command that was run
+        self.output = output  # captured output, or None (set by check_output)
+    def __str__(self) -> str:
+        return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
+
+if mswindows:
+    import threading
+    import msvcrt
+    import _subprocess
+    # Minimal stand-in mirroring the fields of the win32 STARTUPINFO
+    # structure that this module reads and writes.
+    class STARTUPINFO:
+        dwFlags = 0
+        hStdInput = cast(Any, None)
+        hStdOutput = cast(Any, None)
+        hStdError = cast(Any, None)
+        wShowWindow = 0
+    class pywintypes:
+        error = IOError
+else:
+    import select
+    _has_poll = hasattr(select, 'poll')
+    import fcntl
+    import pickle
+
+    try:
+        import _posixsubprocess
+        have_posixsubprocess = True
+    except ImportError:
+        have_posixsubprocess = False
+        warnings.warn("The _posixsubprocess module is not being used. "
+                      "Child process reliability may suffer if your "
+                      "program uses threads.", RuntimeWarning)
+
+    # When select or poll has indicated that the file is writable,
+    # we can write up to _PIPE_BUF bytes without risk of blocking.
+    # POSIX defines PIPE_BUF as >= 512.
+    _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # type: int
+
+    _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1) # type: int
+
+    def _set_cloexec(fd: int, cloexec: bool) -> None:
+        # Set or clear the FD_CLOEXEC flag on file descriptor `fd`.
+        old = fcntl.fcntl(fd, fcntl.F_GETFD)
+        if cloexec:
+            fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
+        else:
+            fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
+
+    # Prefer the C helper (creates both ends close-on-exec atomically);
+    # fall back to os.pipe() plus two non-atomic fcntl calls.
+    if have_posixsubprocess:
+        _create_pipe = _posixsubprocess.cloexec_pipe
+    else:
+        def __create_pipe() -> Tuple[int, int]:
+            fds = os.pipe()
+            _set_cloexec(fds[0], True)
+            _set_cloexec(fds[1], True)
+            return fds
+        _create_pipe = __create_pipe
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
+           "getoutput", "check_output", "CalledProcessError"]
+
+if mswindows:
+    from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
+                             STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
+                             STD_ERROR_HANDLE, SW_HIDE,
+                             STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
+
+    __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
+                    "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
+                    "STD_ERROR_HANDLE", "SW_HIDE",
+                    "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"])
+try:
+    MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
+    # catching (AttributeError, ValueError) would be the targeted choice.
+    MAXFD = 256
+
+# This list holds Popen instances for which the underlying process had not
+# exited at the time its __del__ method got called: those processes are wait()ed
+# for synchronously from _cleanup() when a new Popen object is created, to avoid
+# zombie processes.
+_active = []  # type: List[Popen]
+
+def _cleanup() -> None:
+    """Poll the leftover processes in _active and drop the finished ones.
+
+    Run from Popen.__init__ so abandoned children do not stay zombies.
+    """
+    for inst in _active[:]:  # iterate a copy; _active is mutated below
+        res = inst._internal_poll(_deadstate=sys.maxsize)
+        if res is not None:
+            try:
+                _active.remove(inst)
+            except ValueError:
+                # This can happen if two threads create a new Popen instance.
+                # It's harmless that it was already removed, so ignore.
+                pass
+
+# Sentinel values for Popen's stdin/stdout/stderr arguments.
+PIPE = -1     # create a new pipe to/from the child
+STDOUT = -2   # redirect the child's stderr into its stdout
+
+
+def _eintr_retry_call(func: Any, *args: Any) -> Any:
+    """Call func(*args), retrying as long as it fails with EINTR.
+
+    NOTE(review): PEP 475 (Python 3.5+) makes the interpreter retry
+    interrupted system calls itself; this wrapper targets 3.2.
+    """
+    while True:
+        try:
+            return func(*args)
+        except (OSError, IOError) as e:
+            if e.errno == errno.EINTR:
+                continue
+            raise
+
+
+def call(*popenargs: Any, **kwargs: Any) -> int:
+    """Run command with arguments.  Wait for command to complete, then
+    return the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    retcode = call(["ls", "-l"])
+    """
+    # wait() blocks until the child exits and returns its returncode.
+    return Popen(*popenargs, **kwargs).wait()
+
+
+def check_call(*popenargs: Any, **kwargs: Any) -> int:
+    """Run command with arguments.  Wait for command to complete.  If
+    the exit code was zero then return, otherwise raise
+    CalledProcessError.  The CalledProcessError object will have the
+    return code in the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    check_call(["ls", "-l"])
+    """
+    retcode = call(*popenargs, **kwargs)
+    if retcode:
+        # For the exception message, prefer an explicit args= keyword,
+        # otherwise fall back to the first positional argument.
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        raise CalledProcessError(retcode, cmd)
+    return 0
+
+
+def check_output(*popenargs: Any, **kwargs: Any) -> bytes:
+    r"""Run command with arguments and return its output as a byte string.
+
+    If the exit code was non-zero it raises a CalledProcessError.  The
+    CalledProcessError object will have the return code in the returncode
+    attribute and output in the output attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    >>> check_output(["ls", "-l", "/dev/null"])
+    b'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'
+
+    The stdout argument is not allowed as it is used internally.
+    To capture standard error in the result, use stderr=STDOUT.
+
+    >>> check_output(["/bin/sh", "-c",
+    ...               "ls -l non_existent_file ; exit 0"],
+    ...              stderr=STDOUT)
+    b'ls: non_existent_file: No such file or directory\n'
+    """
+    # stdout must be a pipe so the output can be captured below.
+    if 'stdout' in kwargs:
+        raise ValueError('stdout argument not allowed, it will be overridden.')
+    kwargs['stdout'] = PIPE
+    process = Popen(*popenargs, **kwargs)
+    # communicate() reads until EOF and waits for the child to exit.
+    output, unused_err = process.communicate()
+    retcode = process.poll()
+    if retcode:
+        # Same error-message fallback as check_call(): args= keyword
+        # first, then the first positional argument.
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        raise CalledProcessError(retcode, cmd, output=output)
+    return output
+
+
+def list2cmdline(seq: Sequence[str]) -> str:
+    """
+    Translate a sequence of arguments into a command line
+    string, using the same rules as the MS C runtime:
+
+    1) Arguments are delimited by white space, which is either a
+       space or a tab.
+
+    2) A string surrounded by double quotation marks is
+       interpreted as a single argument, regardless of white space
+       contained within.  A quoted string can be embedded in an
+       argument.
+
+    3) A double quotation mark preceded by a backslash is
+       interpreted as a literal double quotation mark.
+
+    4) Backslashes are interpreted literally, unless they
+       immediately precede a double quotation mark.
+
+    5) If backslashes immediately precede a double quotation mark,
+       every pair of backslashes is interpreted as a literal
+       backslash.  If the number of backslashes is odd, the last
+       backslash escapes the next double quotation mark as
+       described in rule 3.
+    """
+
+    # See
+    # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+    # or search http://msdn.microsoft.com for
+    # "Parsing C++ Command-Line Arguments"
+    result = []  # type: List[str]
+    needquote = False
+    for arg in seq:
+        # Pending run of backslashes; their meaning depends on whether a
+        # double quote follows them (rules 4 and 5).
+        bs_buf = []  # type: List[str]
+
+        # Add a space to separate this argument from the others
+        if result:
+            result.append(' ')
+
+        # Empty arguments must also be quoted to survive round-tripping.
+        needquote = (" " in arg) or ("\t" in arg) or not arg
+        if needquote:
+            result.append('"')
+
+        for c in arg:
+            if c == '\\':
+                # Don't know if we need to double yet.
+                bs_buf.append(c)
+            elif c == '"':
+                # Double backslashes.
+                # (The pending run precedes a quote, so each backslash is
+                # doubled and the quote itself is escaped -- rules 3/5.)
+                result.append('\\' * len(bs_buf)*2)
+                bs_buf = []
+                result.append('\\"')
+            else:
+                # Normal char
+                if bs_buf:
+                    result.extend(bs_buf)
+                    bs_buf = []
+                result.append(c)
+
+        # Add remaining backslashes, if any.
+        if bs_buf:
+            result.extend(bs_buf)
+
+        if needquote:
+            # Trailing backslashes precede the closing quote, so the run
+            # (already emitted once above) is emitted a second time to
+            # double it, per rule 5.
+            result.extend(bs_buf)
+            result.append('"')
+
+    return ''.join(result)
+
+
+# Various tools for executing commands and looking at their output and status.
+#
+# NB This only works (and is only relevant) for POSIX.
+
+def getstatusoutput(cmd: str) -> Tuple[int, str]:
+    """Return (status, output) of executing cmd in a shell.
+
+    Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
+    (status, output).  cmd is actually run as '{ cmd ; } 2>&1', so that the
+    returned output will contain output or error messages.  A trailing newline
+    is stripped from the output.  The exit status for the command can be
+    interpreted according to the rules for the C function wait().  Example:
+
+    >>> import subprocess
+    >>> subprocess.getstatusoutput('ls /bin/ls')
+    (0, '/bin/ls')
+    >>> subprocess.getstatusoutput('cat /bin/junk')
+    (256, 'cat: /bin/junk: No such file or directory')
+    >>> subprocess.getstatusoutput('/bin/junk')
+    (256, 'sh: /bin/junk: not found')
+    """
+    # The braces group cmd so that '2>&1' applies to the whole command,
+    # folding stderr into the captured stream.
+    pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
+    text = pipe.read()
+    sts = pipe.close()
+    # close() returns None when the command exited with status 0.
+    if sts is None: sts = 0
+    if text[-1:] == '\n': text = text[:-1]
+    return sts, text
+
+
+def getoutput(cmd: str) -> str:
+    """Return output (stdout or stderr) of executing cmd in a shell.
+
+    Like getstatusoutput(), except the exit status is ignored and the return
+    value is a string containing the command's output.  Example:
+
+    >>> import subprocess
+    >>> subprocess.getoutput('ls /bin/ls')
+    '/bin/ls'
+    """
+    # Index 1 of the (status, output) tuple: keep the output only.
+    return getstatusoutput(cmd)[1]
+
+
+# Unique sentinel: lets Popen.__init__ distinguish "close_fds not passed"
+# from an explicit True/False so the per-platform default can be applied.
+_PLATFORM_DEFAULT_CLOSE_FDS = object()
+
+
+class Popen(object):
+    def __init__(self, args: Sequence[Any], bufsize: int = 0,
+                 executable: str = None, stdin: Any = None,
+                 stdout: Any = None, stderr: Any = None,
+                 preexec_fn: Callable[[], Any] = None,
+                 close_fds: Any = _PLATFORM_DEFAULT_CLOSE_FDS,
+                 shell: int = False, cwd: str = None,
+                 env: Mapping[str, str] = None,
+                 universal_newlines: int = False,
+                 startupinfo: 'STARTUPINFO' = None, creationflags: int = 0,
+                 restore_signals: bool = True, start_new_session: bool = False,
+                 pass_fds: Any = ()) -> None:
+        """Create new Popen instance."""
+        _cleanup()
+
+        self._child_created = False
+        if bufsize is None:
+            bufsize = 0  # Restore default
+        if not isinstance(bufsize, int):
+            raise TypeError("bufsize must be an integer")
+
+        if mswindows:
+            if preexec_fn is not None:
+                raise ValueError("preexec_fn is not supported on Windows "
+                                 "platforms")
+            any_stdio_set = (stdin is not None or stdout is not None or
+                             stderr is not None)
+            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+                if any_stdio_set:
+                    close_fds = False
+                else:
+                    close_fds = True
+            elif close_fds and any_stdio_set:
+                raise ValueError(
+                        "close_fds is not supported on Windows platforms"
+                        " if you redirect stdin/stdout/stderr")
+        else:
+            # POSIX
+            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+                close_fds = True
+            if pass_fds and not close_fds:
+                warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
+                close_fds = True
+            if startupinfo is not None:
+                raise ValueError("startupinfo is only supported on Windows "
+                                 "platforms")
+            if creationflags != 0:
+                raise ValueError("creationflags is only supported on Windows "
+                                 "platforms")
+
+        self.stdin = None # type: IO[Any]
+        self.stdout = None # type: IO[Any]
+        self.stderr = None # type: IO[Any]
+        self.pid = None # type: int
+        self.returncode = None # type: int
+        self.universal_newlines = universal_newlines
+
+        # Input and output objects. The general principle is like
+        # this:
+        #
+        # Parent                   Child
+        # ------                   -----
+        # p2cwrite   ---stdin--->  p2cread
+        # c2pread    <--stdout---  c2pwrite
+        # errread    <--stderr---  errwrite
+        #
+        # On POSIX, the child objects are file descriptors.  On
+        # Windows, these are Windows file handles.  The parent objects
+        # are file descriptors on both platforms.  The parent objects
+        # are -1 when not using PIPEs. The child objects are -1
+        # when not redirecting.
+
+        (p2cread, p2cwrite,
+         c2pread, c2pwrite,
+         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+
+        # We wrap OS handles *before* launching the child, otherwise a
+        # quickly terminating child could make our fds unwrappable
+        # (see #8458).
+
+        if mswindows:
+            if p2cwrite != -1:
+                p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
+            if c2pread != -1:
+                c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
+            if errread != -1:
+                errread = msvcrt.open_osfhandle(errread.Detach(), 0)
+
+        if p2cwrite != -1:
+            self.stdin = io.open(p2cwrite, 'wb', bufsize)
+            if self.universal_newlines:
+                self.stdin = io.TextIOWrapper(self.stdin, write_through=True)
+        if c2pread != -1:
+            self.stdout = io.open(c2pread, 'rb', bufsize)
+            if universal_newlines:
+                self.stdout = io.TextIOWrapper(self.stdout)
+        if errread != -1:
+            self.stderr = io.open(errread, 'rb', bufsize)
+            if universal_newlines:
+                self.stderr = io.TextIOWrapper(self.stderr)
+
+        try:
+            self._execute_child(args, executable, preexec_fn, close_fds,
+                                pass_fds, cwd, env, universal_newlines,
+                                startupinfo, creationflags, shell,
+                                p2cread, p2cwrite,
+                                c2pread, c2pwrite,
+                                errread, errwrite,
+                                restore_signals, start_new_session)
+        except:
+            # Cleanup if the child failed starting
+            for f in filter(None, [self.stdin, self.stdout, self.stderr]):
+                try:
+                    f.close()
+                except EnvironmentError:
+                    # Ignore EBADF or other errors
+                    pass
+            raise
+
+
+    def _translate_newlines(self, data: bytes, encoding: str) -> str:
+        """Decode *data* with *encoding*, normalizing CRLF and bare CR to LF."""
+        # Normalize at the bytes level first so no CR survives decoding.
+        data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
+        return data.decode(encoding)
+
+    def __enter__(self) -> 'Popen':
+        """Context-manager entry: return this Popen unchanged."""
+        return self
+
+    def __exit__(self, type: type, value: BaseException,
+                 traceback: TracebackType) -> bool:
+        """Close our ends of the child's pipes, then reap the child.
+
+        NOTE(review): annotated ``-> bool`` but falls off the end
+        (returns None), so any exception always propagates out of the
+        with-block.
+        """
+        if self.stdout:
+            self.stdout.close()
+        if self.stderr:
+            self.stderr.close()
+        if self.stdin:
+            self.stdin.close()
+        # Wait for the process to terminate, to avoid zombies.
+        self.wait()
+
+    def __del__(self, _maxsize: int = sys.maxsize,
+                _active: List['Popen'] = _active) -> None:
+        # _maxsize and _active are bound as default arguments so they stay
+        # reachable during interpreter shutdown, when module globals may
+        # already have been cleared.
+        # If __init__ hasn't had a chance to execute (e.g. if it
+        # was passed an undeclared keyword argument), we don't
+        # have a _child_created attribute at all.
+        if not getattr(self, '_child_created', False):
+            # We didn't get to successfully create a child process.
+            return
+        # In case the child hasn't been waited on, check if it's done.
+        # _deadstate=_maxsize marks the child "dead" if waitpid errors out.
+        self._internal_poll(_deadstate=_maxsize)
+        if self.returncode is None and _active is not None:
+            # Child is still running, keep us alive until we can wait on it.
+            _active.append(self)
+
+
+    def communicate(self, input: Any = None) -> Tuple[Any, Any]:
+        """Interact with process: Send data to stdin.  Read data from
+        stdout and stderr, until end-of-file is reached.  Wait for
+        process to terminate.  The optional input argument should be a
+        string to be sent to the child process, or None, if no data
+        should be sent to the child.
+
+        communicate() returns a tuple (stdout, stderr)."""
+
+        # Optimization: If we are only using one pipe, or no pipe at
+        # all, using select() or threads is unnecessary.
+        if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+            # NOTE(review): annotated IO[Any] but these locals actually
+            # hold the data read (or None) that is returned below.
+            stdout = None # type: IO[Any]
+            stderr = None # type: IO[Any]
+            if self.stdin:
+                if input:
+                    try:
+                        self.stdin.write(input)
+                    except IOError as e:
+                        # A dead child yields EPIPE/EINVAL; treat as EOF.
+                        if e.errno != errno.EPIPE and e.errno != errno.EINVAL:
+                            raise
+                self.stdin.close()
+            elif self.stdout:
+                stdout = _eintr_retry_call(self.stdout.read)
+                self.stdout.close()
+            elif self.stderr:
+                stderr = _eintr_retry_call(self.stderr.read)
+                self.stderr.close()
+            self.wait()
+            return (stdout, stderr)
+
+        # Two or more pipes in play: delegate to the platform-specific
+        # implementation (reader threads on Windows, poll/select on POSIX).
+        return self._communicate(input)
+
+
+    def poll(self) -> int:
+        """Return the child's returncode if it has exited, else None."""
+        return self._internal_poll()
+
+
+    if mswindows:
+        #
+        # Windows methods
+        #
+        def _get_handles(self, stdin: Any, stdout: Any,
+                         stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
+            """Construct and return tuple with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+
+            On Windows the child-side entries are inheritable handles;
+            -1 means that stream is not being redirected.
+            """
+            if stdin is None and stdout is None and stderr is None:
+                return (-1, -1, -1, -1, -1, -1)
+
+            p2cread, p2cwrite = -1, -1 # type: (Any, Any)
+            c2pread, c2pwrite = -1, -1 # type: (Any, Any)
+            errread, errwrite = -1, -1 # type: (Any, Any)
+
+            if stdin is None:
+                p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE)
+                if p2cread is None:
+                    # No standard input handle (e.g. GUI process); give the
+                    # child a dummy pipe end instead.
+                    p2cread, _ = _subprocess.CreatePipe(None, 0)
+            elif stdin == PIPE:
+                p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+            elif isinstance(stdin, int):
+                p2cread = msvcrt.get_osfhandle(stdin)
+            else:
+                # Assuming file-like object
+                p2cread = msvcrt.get_osfhandle(stdin.fileno())
+            p2cread = self._make_inheritable(p2cread)
+
+            if stdout is None:
+                c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE)
+                if c2pwrite is None:
+                    _, c2pwrite = _subprocess.CreatePipe(None, 0)
+            elif stdout == PIPE:
+                c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+            elif isinstance(stdout, int):
+                c2pwrite = msvcrt.get_osfhandle(stdout)
+            else:
+                # Assuming file-like object
+                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+            c2pwrite = self._make_inheritable(c2pwrite)
+
+            if stderr is None:
+                errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE)
+                if errwrite is None:
+                    _, errwrite = _subprocess.CreatePipe(None, 0)
+            elif stderr == PIPE:
+                errread, errwrite = _subprocess.CreatePipe(None, 0)
+            elif stderr == STDOUT:
+                # Point stderr at whatever stdout was mapped to above.
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = msvcrt.get_osfhandle(stderr)
+            else:
+                # Assuming file-like object
+                errwrite = msvcrt.get_osfhandle(stderr.fileno())
+            errwrite = self._make_inheritable(errwrite)
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _make_inheritable(self, handle: _subprocess.Handle) -> int:
+            """Return a duplicate of handle, which is inheritable"""
+            # DuplicateHandle with bInheritHandle=1 and
+            # DUPLICATE_SAME_ACCESS, so the child can inherit the copy.
+            return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
+                                handle, _subprocess.GetCurrentProcess(), 0, 1,
+                                _subprocess.DUPLICATE_SAME_ACCESS)
+
+
+        def _find_w9xpopen(self) -> str:
+            """Find and return absolute path to w9xpopen.exe"""
+            # First look next to the current module's DLL/EXE.
+            w9xpopen = os.path.join(
+                            os.path.dirname(_subprocess.GetModuleFileName(0)),
+                                    "w9xpopen.exe")
+            if not os.path.exists(w9xpopen):
+                # Eeek - file-not-found - possibly an embedding
+                # situation - see if we can locate it in sys.exec_prefix
+                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
+                                        "w9xpopen.exe")
+                if not os.path.exists(w9xpopen):
+                    raise RuntimeError("Cannot locate w9xpopen.exe, which is "
+                                       "needed for Popen to work with your "
+                                       "shell or platform.")
+            return w9xpopen
+
+
+        def _execute_child(self, args: Sequence[str], executable: str,
+                           preexec_fn: Callable[[], Any], close_fds: Any,
+                           pass_fds: Any, cwd: str, env: Mapping[str, str],
+                           universal_newlines: int,
+                           startupinfo: STARTUPINFO, creationflags: int,
+                           shell: int,
+                           p2cread: Any, p2cwrite: Any,
+                           c2pread: Any, c2pwrite: Any,
+                           errread: Any, errwrite: Any,
+                           restore_signals: bool,
+                           start_new_session: bool) -> None:
+            """Execute program (MS Windows version)"""
+
+            assert not pass_fds, "pass_fds not supported on Windows."
+
+            if not isinstance(args, str):
+                args = list2cmdline(args)
+
+            # Process startup details
+            if startupinfo is None:
+                startupinfo = STARTUPINFO()
+            # Hand the std handles to the child only when all three are
+            # redirected; STARTF_USESTDHANDLES passes them as a unit.
+            if -1 not in (p2cread, c2pwrite, errwrite):
+                startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES
+                startupinfo.hStdInput = p2cread
+                startupinfo.hStdOutput = c2pwrite
+                startupinfo.hStdError = errwrite
+
+            if shell:
+                # Run the command through COMSPEC with a hidden window.
+                startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
+                startupinfo.wShowWindow = _subprocess.SW_HIDE
+                comspec = os.environ.get("COMSPEC", "cmd.exe")
+                args = '{} /c "{}"'.format (comspec, args)
+                if (_subprocess.GetVersion() >= 0x80000000 or
+                        os.path.basename(comspec).lower() == "command.com"):
+                    # Win9x, or using command.com on NT. We need to
+                    # use the w9xpopen intermediate program. For more
+                    # information, see KB Q150956
+                    # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
+                    w9xpopen = self._find_w9xpopen()
+                    args = '"%s" %s' % (w9xpopen, args)
+                    # Not passing CREATE_NEW_CONSOLE has been known to
+                    # cause random failures on win9x.  Specifically a
+                    # dialog: "Your program accessed mem currently in
+                    # use at xxx" and a hopeful warning about the
+                    # stability of your system.  Cost is Ctrl+C won't
+                    # kill children.
+                    creationflags |= _subprocess.CREATE_NEW_CONSOLE
+
+            # Start the process
+            try:
+                hp, ht, pid, tid = _subprocess.CreateProcess(executable,
+                                         cast(str, args),
+                                         # no special security
+                                         None, None,
+                                         int(not close_fds),
+                                         creationflags,
+                                         env,
+                                         cwd,
+                                         startupinfo)
+            except pywintypes.error as e:
+                # Translate pywintypes.error to WindowsError, which is
+                # a subclass of OSError.  FIXME: We should really
+                # translate errno using _sys_errlist (or similar), but
+                # how can this be done from Python?
+                raise WindowsError(*e.args)
+            finally:
+                # Child is launched. Close the parent's copy of those pipe
+                # handles that only the child should have open.  You need
+                # to make sure that no handles to the write end of the
+                # output pipe are maintained in this process or else the
+                # pipe will not close when the child process exits and the
+                # ReadFile will hang.
+                if p2cread != -1:
+                    p2cread.Close()
+                if c2pwrite != -1:
+                    c2pwrite.Close()
+                if errwrite != -1:
+                    errwrite.Close()
+
+            # Retain the process handle, but close the thread handle
+            self._child_created = True
+            self._handle = hp
+            self.pid = pid
+            ht.Close()
+
+        def _internal_poll(self, _deadstate: int = None) -> int:
+            """Check if child process has terminated.  Returns returncode
+            attribute.
+
+            This method is called by __del__, so it can only refer to objects
+            in its local scope.
+
+            """
+            # Delegate to _internal_poll_win, whose default arguments bind
+            # the Win32 functions for shutdown safety.
+            return self._internal_poll_win(_deadstate)
+
+        from _subprocess import Handle
+
+        def _internal_poll_win(self, _deadstate: int = None,
+                _WaitForSingleObject: Callable[[Handle, int], int] =
+                               _subprocess.WaitForSingleObject,
+                _WAIT_OBJECT_0: int = _subprocess.WAIT_OBJECT_0,
+                _GetExitCodeProcess: Callable[[Handle], int] =
+                                    _subprocess.GetExitCodeProcess) -> int:
+            # The Win32 functions are bound as default arguments so this
+            # still works during interpreter shutdown (called via __del__).
+            # NOTE(review): _deadstate is accepted for signature parity with
+            # the POSIX version but is unused here.
+            if self.returncode is None:
+                # Zero timeout: just test whether the process has signaled.
+                if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
+                    self.returncode = _GetExitCodeProcess(self._handle)
+            return self.returncode
+
+
+        def wait(self) -> int:
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode is None:
+                # Block indefinitely until the process handle signals.
+                _subprocess.WaitForSingleObject(self._handle,
+                                                _subprocess.INFINITE)
+                self.returncode = _subprocess.GetExitCodeProcess(self._handle)
+            return self.returncode
+
+
+        def _readerthread(self, fh: IO[AnyStr], buffer: List[AnyStr]) -> None:
+            """Thread body: read *fh* to EOF into *buffer*, then close it."""
+            buffer.append(fh.read())
+            fh.close()
+
+
+        def _communicate(self, input: Any) -> Tuple[Any, Any]:
+            """Windows implementation: daemon threads drain stdout/stderr
+            while this thread feeds stdin, so reads and writes proceed
+            concurrently."""
+            stdout = cast(Any, None) # Return
+            stderr = cast(Any, None) # Return
+
+            if self.stdout:
+                # _readerthread appends the whole stream as one element.
+                stdout = []
+                stdout_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stdout, stdout))
+                stdout_thread.daemon = True
+                stdout_thread.start()
+            if self.stderr:
+                stderr = []
+                stderr_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stderr, stderr))
+                stderr_thread.daemon = True
+                stderr_thread.start()
+
+            if self.stdin:
+                if input is not None:
+                    try:
+                        self.stdin.write(input)
+                    except IOError as e:
+                        # EPIPE means the child is gone; treat as done.
+                        if e.errno != errno.EPIPE:
+                            raise
+                self.stdin.close()
+
+            if self.stdout:
+                stdout_thread.join()
+            if self.stderr:
+                stderr_thread.join()
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout is not None:
+                stdout = stdout[0]
+            if stderr is not None:
+                stderr = stderr[0]
+
+            self.wait()
+            return (stdout, stderr)
+
+        def send_signal(self, sig: int) -> None:
+            """Send a signal to the process
+            """
+            # SIGTERM is emulated via terminate(); only the two console
+            # CTRL events are delivered through os.kill on Windows.
+            if sig == signal.SIGTERM:
+                self.terminate()
+            elif sig == signal.CTRL_C_EVENT:
+                os.kill(self.pid, signal.CTRL_C_EVENT)
+            elif sig == signal.CTRL_BREAK_EVENT:
+                os.kill(self.pid, signal.CTRL_BREAK_EVENT)
+            else:
+                raise ValueError("Unsupported signal: {}".format(sig))
+
+        def terminate(self) -> None:
+            """Terminates the process
+            """
+            # TerminateProcess reports exit code 1 for the killed process.
+            _subprocess.TerminateProcess(self._handle, 1)
+
+        def kill(self) -> None:
+            """Terminates the process
+            """
+            # On Windows kill() is simply an alias for terminate().
+            self.terminate()
+
+    else:
+        #
+        # POSIX methods
+        #
+        def _get_handles(self, stdin: Any, stdout: Any,
+                         stderr: Any) -> Tuple[Any, Any, Any, Any, Any, Any]:
+            """Construct and return tuple with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+
+            On POSIX all six values are plain file descriptors; -1 means
+            that stream is not being redirected.
+            """
+            p2cread, p2cwrite = -1, -1
+            c2pread, c2pwrite = -1, -1
+            errread, errwrite = -1, -1
+
+            if stdin is None:
+                pass
+            elif stdin == PIPE:
+                p2cread, p2cwrite = _create_pipe()
+            elif isinstance(stdin, int):
+                p2cread = stdin
+            else:
+                # Assuming file-like object
+                p2cread = stdin.fileno()
+
+            if stdout is None:
+                pass
+            elif stdout == PIPE:
+                c2pread, c2pwrite = _create_pipe()
+            elif isinstance(stdout, int):
+                c2pwrite = stdout
+            else:
+                # Assuming file-like object
+                c2pwrite = stdout.fileno()
+
+            if stderr is None:
+                pass
+            elif stderr == PIPE:
+                errread, errwrite = _create_pipe()
+            elif stderr == STDOUT:
+                # Point stderr at whatever stdout was mapped to above.
+                errwrite = c2pwrite
+            elif isinstance(stderr, int):
+                errwrite = stderr
+            else:
+                # Assuming file-like object
+                errwrite = stderr.fileno()
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _close_fds(self, fds_to_keep: Set[int]) -> None:
+            """Close every fd >= 3 except those in *fds_to_keep*.
+
+            Runs in the forked child before exec (see _execute_child).
+            """
+            start_fd = 3
+            for fd in sorted(fds_to_keep):
+                if fd >= start_fd:
+                    # Close the gap [start_fd, fd), keeping fd itself open.
+                    os.closerange(start_fd, fd)
+                    start_fd = fd + 1
+            if start_fd <= MAXFD:
+                os.closerange(start_fd, MAXFD)
+
+
+        def _execute_child(self, args: Sequence[str], executable: str,
+                           preexec_fn: Callable[[], Any], close_fds: Any,
+                           pass_fds: Any, cwd: str, env: Mapping[str, str],
+                           universal_newlines: int,
+                           startupinfo: 'STARTUPINFO', creationflags: int,
+                           shell: int,
+                           p2cread: Any, p2cwrite: Any,
+                           c2pread: Any, c2pwrite: Any,
+                           errread: Any, errwrite: Any,
+                           restore_signals: bool,
+                           start_new_session: bool) -> None:
+            """Execute program (POSIX version)"""
+
+            if isinstance(args, str):
+                args = [args]
+            else:
+                args = list(args)
+
+            if shell:
+                # The command becomes an argument to "/bin/sh -c"; an
+                # explicit executable replaces the shell, not the command.
+                args = ["/bin/sh", "-c"] + args
+                if executable:
+                    args[0] = executable
+
+            if executable is None:
+                executable = args[0]
+
+            # For transferring possible exec failure from child to parent.
+            # Data format: "exception name:hex errno:description"
+            # Pickle is not used; it is complex and involves memory allocation.
+            errpipe_read, errpipe_write = _create_pipe()
+            try:
+                try:
+
+                    if have_posixsubprocess:
+                        # We must avoid complex work that could involve
+                        # malloc or free in the child process to avoid
+                        # potential deadlocks, thus we do all this here.
+                        # and pass it to fork_exec()
+
+                        if env is not None:
+                            env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
+                                        for k, v in env.items()]
+                        else:
+                            env_list = None  # Use execv instead of execve.
+                        executable_enc = os.fsencode(executable)
+                        if os.path.dirname(executable_enc):
+                            executable_list = (executable_enc,) # type: tuple
+                        else:
+                            # This matches the behavior of os._execvpe().
+                            executable_list = tuple(
+                                os.path.join(os.fsencode(dir), executable_enc)
+                                for dir in os.get_exec_path(env))
+                        fds_to_keep = set(pass_fds)
+                        fds_to_keep.add(errpipe_write)
+                        self.pid = _posixsubprocess.fork_exec(
+                                args, executable_list,
+                                close_fds, sorted(fds_to_keep), cwd, env_list,
+                                p2cread, p2cwrite, c2pread, c2pwrite,
+                                errread, errwrite,
+                                errpipe_read, errpipe_write,
+                                restore_signals, start_new_session, preexec_fn)
+                        self._child_created = True
+                    else:
+                        # Pure Python implementation: It is not thread safe.
+                        # This implementation may deadlock in the child if your
+                        # parent process has any other threads running.
+
+                        gc_was_enabled = gc.isenabled()
+                        # Disable gc to avoid bug where gc -> file_dealloc ->
+                        # write to stderr -> hang.  See issue1336
+                        gc.disable()
+                        try:
+                            self.pid = os.fork()
+                        except:
+                            if gc_was_enabled:
+                                gc.enable()
+                            raise
+                        self._child_created = True
+                        if self.pid == 0:
+                            # Child
+                            try:
+                                # Close parent's pipe ends
+                                if p2cwrite != -1:
+                                    os.close(p2cwrite)
+                                if c2pread != -1:
+                                    os.close(c2pread)
+                                if errread != -1:
+                                    os.close(errread)
+                                os.close(errpipe_read)
+
+                                # When duping fds, if there arises a situation
+                                # where one of the fds is either 0, 1 or 2, it
+                                # is possible that it is overwritten (#12607).
+                                if c2pwrite == 0:
+                                    c2pwrite = os.dup(c2pwrite)
+                                if errwrite == 0 or errwrite == 1:
+                                    errwrite = os.dup(errwrite)
+
+                                # Dup fds for child
+                                def _dup2(a: int, b: int) -> None:
+                                    # dup2() removes the CLOEXEC flag but
+                                    # we must do it ourselves if dup2()
+                                    # would be a no-op (issue #10806).
+                                    if a == b:
+                                        _set_cloexec(a, False)
+                                    elif a != -1:
+                                        os.dup2(a, b)
+                                _dup2(p2cread, 0)
+                                _dup2(c2pwrite, 1)
+                                _dup2(errwrite, 2)
+
+                                # Close pipe fds.  Make sure we don't close the
+                                # same fd more than once, or standard fds.
+                                closed = set()  # type: Set[int]
+                                for fd in [p2cread, c2pwrite, errwrite]:
+                                    if fd > 2 and fd not in closed:
+                                        os.close(fd)
+                                        closed.add(fd)
+
+                                # Close all other fds, if asked for
+                                if close_fds:
+                                    fds_to_keep = set(pass_fds)
+                                    fds_to_keep.add(errpipe_write)
+                                    self._close_fds(fds_to_keep)
+
+
+                                if cwd is not None:
+                                    os.chdir(cwd)
+
+                                # This is a copy of Python/pythonrun.c
+                                # _Py_RestoreSignals().  If that were exposed
+                                # as a sys._py_restoresignals func it would be
+                                # better.. but this pure python implementation
+                                # isn't likely to be used much anymore.
+                                if restore_signals:
+                                    signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
+                                    for sig in signals:
+                                        if hasattr(signal, sig):
+                                            signal.signal(getattr(signal, sig),
+                                                          signal.SIG_DFL)
+
+                                if start_new_session and hasattr(os, 'setsid'):
+                                    os.setsid()
+
+                                if preexec_fn:
+                                    preexec_fn()
+
+                                if env is None:
+                                    os.execvp(executable, args)
+                                else:
+                                    os.execvpe(executable, args, env)
+
+                            except:
+                                try:
+                                    # Report the failure to the parent over
+                                    # the error pipe in the agreed format.
+                                    exc_type, exc_value = sys.exc_info()[:2]
+                                    if isinstance(exc_value, OSError):
+                                        errno_num = exc_value.errno
+                                    else:
+                                        errno_num = 0
+                                    message = '%s:%x:%s' % (exc_type.__name__,
+                                                            errno_num, exc_value)
+                                    messageb = message.encode(errors="surrogatepass")
+                                    os.write(errpipe_write, messageb)
+                                except Exception:
+                                    # We MUST not allow anything odd happening
+                                    # above to prevent us from exiting below.
+                                    pass
+
+                            # This exitcode won't be reported to applications
+                            # so it really doesn't matter what we return.
+                            os._exit(255)
+
+                        # Parent
+                        if gc_was_enabled:
+                            gc.enable()
+                finally:
+                    # be sure the FD is closed no matter what
+                    os.close(errpipe_write)
+
+                if p2cread != -1 and p2cwrite != -1:
+                    os.close(p2cread)
+                if c2pwrite != -1 and c2pread != -1:
+                    os.close(c2pwrite)
+                if errwrite != -1 and errread != -1:
+                    os.close(errwrite)
+
+                # Wait for exec to fail or succeed; possibly raising an
+                # exception (limited in size)
+                data = bytearray()
+                while True:
+                    part = _eintr_retry_call(os.read, errpipe_read, 50000)
+                    data += part
+                    if not part or len(data) > 50000:
+                        break
+            finally:
+                # be sure the FD is closed no matter what
+                os.close(errpipe_read)
+
+            # Non-empty data means exec failed in the child: reap it, then
+            # re-raise the child's exception here in the parent.
+            if data:
+                try:
+                    _eintr_retry_call(os.waitpid, self.pid, 0)
+                except OSError as e:
+                    if e.errno != errno.ECHILD:
+                        raise
+                try:
+                    (exception_name, hex_errno,
+                     err_msg_b) = bytes(data).split(b':', 2)
+                except ValueError:
+                    print('Bad exception data:', repr(data))
+                    exception_name = b'RuntimeError'
+                    hex_errno = b'0'
+                    err_msg_b = b'Unknown'
+                child_exception_type = getattr(
+                        builtins, exception_name.decode('ascii'),
+                        RuntimeError)
+                for fd in (p2cwrite, c2pread, errread):
+                    if fd != -1:
+                        os.close(fd)
+                err_msg = err_msg_b.decode(errors="surrogatepass")
+                if issubclass(child_exception_type, OSError) and hex_errno:
+                    errno_num = int(hex_errno, 16)
+                    if errno_num != 0:
+                        err_msg = os.strerror(errno_num)
+                        if errno_num == errno.ENOENT:
+                            err_msg += ': ' + repr(args[0])
+                    raise child_exception_type(errno_num, err_msg)
+                raise child_exception_type(err_msg)
+
+
+        def _handle_exitstatus(
+                self, sts: int,
+                _WIFSIGNALED: Callable[[int], bool] = os.WIFSIGNALED,
+                _WTERMSIG: Callable[[int], int] = os.WTERMSIG,
+                _WIFEXITED: Callable[[int], bool] = os.WIFEXITED,
+                _WEXITSTATUS: Callable[[int], int] = os.WEXITSTATUS) -> None:
+            # Set self.returncode from a raw waitpid() status: negative
+            # signal number if killed by a signal, else the exit status.
+            # The os.* helpers are bound as default arguments for shutdown
+            # safety (annotations fixed: WTERMSIG/WEXITSTATUS return int).
+            # This method is called (indirectly) by __del__, so it cannot
+            # refer to anything outside of its local scope.
+            if _WIFSIGNALED(sts):
+                self.returncode = -_WTERMSIG(sts)
+            elif _WIFEXITED(sts):
+                self.returncode = _WEXITSTATUS(sts)
+            else:
+                # Should never happen
+                raise RuntimeError("Unknown child exit status!")
+
+
+        def _internal_poll(self, _deadstate: int = None) -> int:
+            """Check if child process has terminated.  Returns returncode
+            attribute.
+
+            This method is called by __del__, so it cannot reference anything
+            outside of the local scope (nor can any methods it calls).
+
+            """
+            # Delegate to _internal_poll_posix, whose default arguments bind
+            # the os.* functions for shutdown safety.
+            return self._internal_poll_posix(_deadstate)
+
+        def _internal_poll_posix(self, _deadstate: int = None,
+                                 _waitpid: Callable[[int, int],
+                                                 Tuple[int, int]] = os.waitpid,
+                                 _WNOHANG: int = os.WNOHANG,
+                                 _os_error: Any = os.error) -> int:
+            if self.returncode is None:
+                try:
+                    pid, sts = _waitpid(self.pid, _WNOHANG)
+                    if pid == self.pid:
+                        self._handle_exitstatus(sts)
+                except _os_error:
+                    if _deadstate is not None:
+                        self.returncode = _deadstate
+            return self.returncode
+
+
+        def wait(self) -> int:
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode is None:
+                try:
+                    pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
+                except OSError as e:
+                    if e.errno != errno.ECHILD:
+                        raise
+                    # This happens if SIGCLD is set to be ignored or waiting
+                    # for child processes has otherwise been disabled for our
+                    # process.  This child is dead, we can't get the status.
+                    sts = 0
+                self._handle_exitstatus(sts)
+            return self.returncode
+
+
+        def _communicate(self, input: Any) -> Tuple[Any, Any]:
+            if self.stdin:
+                # Flush stdio buffer.  This might block, if the user has
+                # been writing to .stdin in an uncontrolled fashion.
+                self.stdin.flush()
+                if not input:
+                    self.stdin.close()
+
+            if _has_poll:
+                stdout, stderr = self._communicate_with_poll(input)
+            else:
+                stdout, stderr = self._communicate_with_select(input)
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout is not None:
+                stdout2 = b''.join(stdout)
+            else:
+                stdout2 = None
+            if stderr is not None:
+                stderr2 = b''.join(stderr)
+            else:
+                stderr2 = None
+
+            # Translate newlines, if requested.
+            # This also turns bytes into strings.
+            stdout3 = cast(Any, stdout2)
+            stderr3 = cast(Any, stderr2)
+            if self.universal_newlines:
+                if stdout is not None:
+                    stdout3 = self._translate_newlines(
+                        stdout2, cast(TextIO, self.stdout).encoding)
+                if stderr is not None:
+                    stderr3 = self._translate_newlines(
+                        stderr2, cast(TextIO, self.stderr).encoding)
+
+            self.wait()
+            return (stdout3, stderr3)
+
+
+        def _communicate_with_poll(self, input: Any) -> Tuple[List[bytes],
+                                                              List[bytes]]:
+            stdout = None # type: List[bytes] # Return
+            stderr = None # type: List[bytes] # Return
+            fd2file = {}  # type: Dict[int, Any]
+            fd2output = {}  # type: Dict[int, List[bytes]]
+
+            poller = select.poll()
+            def register_and_append(file_obj: IO[Any], eventmask: int) -> None:
+                poller.register(file_obj.fileno(), eventmask)
+                fd2file[file_obj.fileno()] = file_obj
+
+            def close_unregister_and_remove(fd: int) -> None:
+                poller.unregister(fd)
+                fd2file[fd].close()
+                fd2file.pop(fd)
+
+            if self.stdin and input:
+                register_and_append(self.stdin, select.POLLOUT)
+
+            select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
+            if self.stdout:
+                register_and_append(self.stdout, select_POLLIN_POLLPRI)
+                fd2output[self.stdout.fileno()] = stdout = []
+            if self.stderr:
+                register_and_append(self.stderr, select_POLLIN_POLLPRI)
+                fd2output[self.stderr.fileno()] = stderr = []
+
+            input_offset = 0
+            while fd2file:
+                try:
+                    ready = poller.poll()
+                except select.error as e:
+                    if e.args[0] == errno.EINTR:
+                        continue
+                    raise
+
+                # XXX Rewrite these to use non-blocking I/O on the
+                # file objects; they are no longer using C stdio!
+
+                for fd, mode in ready:
+                    if mode & select.POLLOUT:
+                        chunk = input[input_offset : input_offset + _PIPE_BUF]
+                        try:
+                            input_offset += os.write(fd, chunk)
+                        except OSError as e2:
+                            if e2.errno == errno.EPIPE:
+                                close_unregister_and_remove(fd)
+                            else:
+                                raise
+                        else:
+                            if input_offset >= len(input):
+                                close_unregister_and_remove(fd)
+                    elif mode & select_POLLIN_POLLPRI:
+                        data = os.read(fd, 4096)
+                        if not data:
+                            close_unregister_and_remove(fd)
+                        fd2output[fd].append(data)
+                    else:
+                        # Ignore hang up or errors.
+                        close_unregister_and_remove(fd)
+
+            return (stdout, stderr)
+
+
+        def _communicate_with_select(self, input: Any) -> Tuple[List[bytes],
+                                                                List[bytes]]:
+            read_set = []  # type: List[IO[Any]]
+            write_set = []  # type: List[IO[Any]]
+            stdout = None # type: List[bytes] # Return
+            stderr = None # type: List[bytes] # Return
+
+            if self.stdin and input:
+                write_set.append(self.stdin)
+            if self.stdout:
+                read_set.append(self.stdout)
+                stdout = []
+            if self.stderr:
+                read_set.append(self.stderr)
+                stderr = []
+
+            input_offset = 0
+            while read_set or write_set:
+                try:
+                    rlist, wlist, xlist = select.select(read_set, write_set, [])
+                except select.error as e:
+                    if e.args[0] == errno.EINTR:
+                        continue
+                    raise
+
+                # XXX Rewrite these to use non-blocking I/O on the
+                # file objects; they are no longer using C stdio!
+
+                if self.stdin in wlist:
+                    chunk = input[input_offset : input_offset + _PIPE_BUF]
+                    try:
+                        bytes_written = os.write(self.stdin.fileno(), chunk)
+                    except OSError as oe:
+                        if oe.errno == errno.EPIPE:
+                            self.stdin.close()
+                            write_set.remove(self.stdin)
+                        else:
+                            raise
+                    else:
+                        input_offset += bytes_written
+                        if input_offset >= len(input):
+                            self.stdin.close()
+                            write_set.remove(self.stdin)
+
+                if self.stdout in rlist:
+                    data = os.read(self.stdout.fileno(), 1024)
+                    if not data:
+                        self.stdout.close()
+                        read_set.remove(self.stdout)
+                    stdout.append(data)
+
+                if self.stderr in rlist:
+                    data = os.read(self.stderr.fileno(), 1024)
+                    if not data:
+                        self.stderr.close()
+                        read_set.remove(self.stderr)
+                    stderr.append(data)
+
+            return (stdout, stderr)
+
+
+        def send_signal(self, sig: int) -> None:
+            """Send a signal to the process
+            """
+            os.kill(self.pid, sig)
+
+        def terminate(self) -> None:
+            """Terminate the process with SIGTERM
+            """
+            self.send_signal(signal.SIGTERM)
+
+        def kill(self) -> None:
+            """Kill the process with SIGKILL
+            """
+            self.send_signal(signal.SIGKILL)
+
+
+def _demo_posix() -> None:
+    #
+    # Example 1: Simple redirection: Get process list
+    #
+    plist = Popen(["ps"], stdout=PIPE).communicate()[0]
+    print("Process list:")
+    print(plist)
+
+    #
+    # Example 2: Change uid before executing child
+    #
+    if os.getuid() == 0:
+        p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
+        p.wait()
+
+    #
+    # Example 3: Connecting several subprocesses
+    #
+    print("Looking for 'hda'...")
+    p1 = Popen(["dmesg"], stdout=PIPE)
+    p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+    print(repr(p2.communicate()[0]))
+
+    #
+    # Example 4: Catch execution error
+    #
+    print()
+    print("Trying a weird file...")
+    try:
+        print(Popen(["/this/path/does/not/exist"]).communicate())
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            print("The file didn't exist.  I thought so...")
+        else:
+            print("Error", e.errno)
+    else:
+        print("Gosh.  No error.", file=sys.stderr)
+
+
+def _demo_windows() -> None:
+    #
+    # Example 1: Connecting several subprocesses
+    #
+    print("Looking for 'PROMPT' in set output...")
+    p1 = Popen("set", stdout=PIPE, shell=True)
+    p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
+    print(repr(p2.communicate()[0]))
+
+    #
+    # Example 2: Simple execution of program
+    #
+    print("Executing calc...")
+    p = Popen("calc")
+    p.wait()
+
+
+if __name__ == "__main__":
+    if mswindows:
+        _demo_windows()
+    else:
+        _demo_posix()
diff --git a/test-data/stdlib-samples/3.2/tempfile.py b/test-data/stdlib-samples/3.2/tempfile.py
new file mode 100644
index 0000000..d12e21e
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/tempfile.py
@@ -0,0 +1,717 @@
+"""Temporary files.
+
+This module provides generic, low- and high-level interfaces for
+creating temporary files and directories.  The interfaces listed
+as "safe" just below can be used without fear of race conditions.
+Those listed as "unsafe" cannot, and are provided for backward
+compatibility only.
+
+This module also provides some data items to the user:
+
+  TMP_MAX  - maximum number of names that will be tried before
+             giving up.
+  template - the default prefix for all temporary names.
+             You may change this to control the default prefix.
+  tempdir  - If this is set to a string before the first use of
+             any routine from this module, it will be considered as
+             another candidate location to store temporary files.
+"""
+
+__all__ = [
+    "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
+    "SpooledTemporaryFile", "TemporaryDirectory",
+    "mkstemp", "mkdtemp",                  # low level safe interfaces
+    "mktemp",                              # deprecated unsafe interface
+    "TMP_MAX", "gettempprefix",            # constants
+    "tempdir", "gettempdir"
+   ]
+
+
+# Imports.
+
+import warnings as _warnings
+import sys as _sys
+import io as _io
+import os as _os
+import errno as _errno
+from random import Random as _Random
+
+from typing import (
+    Any as _Any, Callable as _Callable, Iterator as _Iterator,
+    List as _List, Tuple as _Tuple, Dict as _Dict, Iterable as _Iterable,
+    IO as _IO, cast as _cast, Optional as _Optional, Type as _Type,
+)
+from types import TracebackType as _TracebackType
+
+try:
+    import fcntl as _fcntl
+except ImportError:
+    def _set_cloexec(fd: int) -> None:
+        pass
+else:
+    def _set_cloexec(fd: int) -> None:
+        try:
+            flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
+        except IOError:
+            pass
+        else:
+            # flags read successfully, modify
+            flags |= _fcntl.FD_CLOEXEC
+            _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)
+
+
+try:
+    import _thread
+    _allocate_lock = _thread.allocate_lock # type: _Callable[[], _Any]
+except ImportError:
+    import _dummy_thread
+    _allocate_lock = _dummy_thread.allocate_lock
+
+_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
+if hasattr(_os, 'O_NOINHERIT'):
+    _text_openflags |= _os.O_NOINHERIT
+if hasattr(_os, 'O_NOFOLLOW'):
+    _text_openflags |= _os.O_NOFOLLOW
+
+_bin_openflags = _text_openflags
+if hasattr(_os, 'O_BINARY'):
+    _bin_openflags |= _os.O_BINARY
+
+if hasattr(_os, 'TMP_MAX'):
+    TMP_MAX = _os.TMP_MAX
+else:
+    TMP_MAX = 10000
+
+template = "tmp"
+
+# Internal routines.
+
+_once_lock = _allocate_lock()
+
+if hasattr(_os, "lstat"):
+    _stat = _os.lstat # type: _Callable[[str], object]
+elif hasattr(_os, "stat"):
+    _stat = _os.stat
+else:
+    # Fallback.  All we need is something that raises os.error if the
+    # file doesn't exist.
+    def __stat(fn: str) -> object:
+        try:
+            f = open(fn)
+        except IOError:
+            raise _os.error()
+        f.close()
+    _stat = __stat
+
+def _exists(fn: str) -> bool:
+    try:
+        _stat(fn)
+    except _os.error:
+        return False
+    else:
+        return True
+
+class _RandomNameSequence(_Iterator[str]):
+    """An instance of _RandomNameSequence generates an endless
+    sequence of unpredictable strings which can safely be incorporated
+    into file names.  Each string is six characters long.  Multiple
+    threads can safely use the same instance at the same time.
+
+    _RandomNameSequence is an iterator."""
+
+    characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
+
+    @property
+    def rng(self) -> _Random:
+        cur_pid = _os.getpid()
+        if cur_pid != getattr(self, '_rng_pid', None):
+            self._rng = _Random()
+            self._rng_pid = cur_pid
+        return self._rng
+
+    def __iter__(self) -> _Iterator[str]:
+        return self
+
+    def __next__(self) -> str:
+        c = self.characters
+        choose = self.rng.choice
+        letters = [choose(c) for dummy in "123456"]
+        return ''.join(letters)
+
+def _candidate_tempdir_list() -> _List[str]:
+    """Generate a list of candidate temporary directories which
+    _get_default_tempdir will try."""
+
+    dirlist = [] # type: _List[str]
+
+    # First, try the environment.
+    for envname in 'TMPDIR', 'TEMP', 'TMP':
+        dirname = _os.getenv(envname)
+        if dirname: dirlist.append(dirname)
+
+    # Failing that, try OS-specific locations.
+    if _os.name == 'nt':
+        dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
+    else:
+        dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
+
+    # As a last resort, the current directory.
+    try:
+        dirlist.append(_os.getcwd())
+    except (AttributeError, _os.error):
+        dirlist.append(_os.curdir)
+
+    return dirlist
+
+def _get_default_tempdir() -> str:
+    """Calculate the default directory to use for temporary files.
+    This routine should be called exactly once.
+
+    We determine whether or not a candidate temp dir is usable by
+    trying to create and write to a file in that directory.  If this
+    is successful, the test file is deleted.  To prevent denial of
+    service, the name of the test file must be randomized."""
+
+    namer = _RandomNameSequence()
+    dirlist = _candidate_tempdir_list()
+
+    for dir in dirlist:
+        if dir != _os.curdir:
+            dir = _os.path.normcase(_os.path.abspath(dir))
+        # Try only a few names per directory.
+        for seq in range(100):
+            name = next(namer)
+            filename = _os.path.join(dir, name)
+            try:
+                fd = _os.open(filename, _bin_openflags, 0o600)
+                fp = _io.open(fd, 'wb')
+                fp.write(b'blat')
+                fp.close()
+                _os.unlink(filename)
+                fp = fd = None
+                return dir
+            except (OSError, IOError) as e:
+                if e.args[0] != _errno.EEXIST:
+                    break # no point trying more names in this directory
+                pass
+    raise IOError(_errno.ENOENT,
+                  "No usable temporary directory found in %s" % dirlist)
+
+_name_sequence = None # type: _RandomNameSequence
+
+def _get_candidate_names() -> _RandomNameSequence:
+    """Common setup sequence for all user-callable interfaces."""
+
+    global _name_sequence
+    if _name_sequence is None:
+        _once_lock.acquire()
+        try:
+            if _name_sequence is None:
+                _name_sequence = _RandomNameSequence()
+        finally:
+            _once_lock.release()
+    return _name_sequence
+
+
+def _mkstemp_inner(dir: str, pre: str, suf: str,
+                   flags: int) -> _Tuple[int, str]:
+    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""
+
+    names = _get_candidate_names()
+
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, pre + name + suf)
+        try:
+            fd = _os.open(file, flags, 0o600)
+            _set_cloexec(fd)
+            return (fd, _os.path.abspath(file))
+        except OSError as e:
+            if e.errno == _errno.EEXIST:
+                continue # try again
+            raise
+
+    raise IOError(_errno.EEXIST, "No usable temporary file name found")
+
+
+# User visible interfaces.
+
+def gettempprefix() -> str:
+    """Accessor for tempdir.template."""
+    return template
+
+tempdir = None # type: str
+
+def gettempdir() -> str:
+    """Accessor for tempfile.tempdir."""
+    global tempdir
+    if tempdir is None:
+        _once_lock.acquire()
+        try:
+            if tempdir is None:
+                tempdir = _get_default_tempdir()
+        finally:
+            _once_lock.release()
+    return tempdir
+
+def mkstemp(suffix: str = "", prefix: str = template, dir: str = None,
+            text: bool = False) -> _Tuple[int, str]:
+    """User-callable function to create and return a unique temporary
+    file.  The return value is a pair (fd, name) where fd is the
+    file descriptor returned by os.open, and name is the filename.
+
+    If 'suffix' is specified, the file name will end with that suffix,
+    otherwise there will be no suffix.
+
+    If 'prefix' is specified, the file name will begin with that prefix,
+    otherwise a default prefix is used.
+
+    If 'dir' is specified, the file will be created in that directory,
+    otherwise a default directory is used.
+
+    If 'text' is specified and true, the file is opened in text
+    mode.  Else (the default) the file is opened in binary mode.  On
+    some operating systems, this makes no difference.
+
+    The file is readable and writable only by the creating user ID.
+    If the operating system uses permission bits to indicate whether a
+    file is executable, the file is executable by no one. The file
+    descriptor is not inherited by children of this process.
+
+    Caller is responsible for deleting the file when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    if text:
+        flags = _text_openflags
+    else:
+        flags = _bin_openflags
+
+    return _mkstemp_inner(dir, prefix, suffix, flags)
+
+
+def mkdtemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
+    """User-callable function to create and return a unique temporary
+    directory.  The return value is the pathname of the directory.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    The directory is readable, writable, and searchable only by the
+    creating user.
+
+    Caller is responsible for deleting the directory when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, prefix + name + suffix)
+        try:
+            _os.mkdir(file, 0o700)
+            return file
+        except OSError as e:
+            if e.errno == _errno.EEXIST:
+                continue # try again
+            raise
+
+    raise IOError(_errno.EEXIST, "No usable temporary directory name found")
+
+def mktemp(suffix: str = "", prefix: str = template, dir: str = None) -> str:
+    """User-callable function to return a unique temporary file name.  The
+    file is not created.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    This function is unsafe and should not be used.  The file name
+    refers to a file that did not exist at some point, but by the time
+    you get around to creating it, someone else may have beaten you to
+    the punch.
+    """
+
+##    from warnings import warn as _warn
+##    _warn("mktemp is a potential security risk to your program",
+##          RuntimeWarning, stacklevel=2)
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+    for seq in range(TMP_MAX):
+        name = next(names)
+        file = _os.path.join(dir, prefix + name + suffix)
+        if not _exists(file):
+            return file
+
+    raise IOError(_errno.EEXIST, "No usable temporary filename found")
+
+
+class _TemporaryFileWrapper:
+    """Temporary file wrapper
+
+    This class provides a wrapper around files opened for
+    temporary use.  In particular, it seeks to automatically
+    remove the file when it is no longer needed.
+    """
+
+    def __init__(self, file: _IO[_Any], name: str,
+                 delete: bool = True) -> None:
+        self.file = file
+        self.name = name
+        self.close_called = False
+        self.delete = delete
+
+        if _os.name != 'nt':
+            # Cache the unlinker so we don't get spurious errors at
+            # shutdown when the module-level "os" is None'd out.  Note
+            # that this must be referenced as self.unlink, because the
+            # name TemporaryFileWrapper may also get None'd out before
+            # __del__ is called.
+            self.unlink = _os.unlink
+
+    def __getattr__(self, name: str) -> _Any:
+        # Attribute lookups are delegated to the underlying file
+        # and cached for non-numeric results
+        # (i.e. methods are cached, closed and friends are not)
+        file = _cast(_Any, self).__dict__['file'] # type: _IO[_Any]
+        a = getattr(file, name)
+        if not isinstance(a, int):
+            setattr(self, name, a)
+        return a
+
+    # The underlying __enter__ method returns the wrong object
+    # (self.file) so override it to return the wrapper
+    def __enter__(self) -> '_TemporaryFileWrapper':
+        self.file.__enter__()
+        return self
+
+    # iter() doesn't use __getattr__ to find the __iter__ method
+    def __iter__(self) -> _Iterator[_Any]:
+        return iter(self.file)
+
+    # NT provides delete-on-close as a primitive, so we don't need
+    # the wrapper to do anything special.  We still use it so that
+    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
+    if _os.name != 'nt':
+        def close(self) -> None:
+            if not self.close_called:
+                self.close_called = True
+                self.file.close()
+                if self.delete:
+                    self.unlink(self.name)
+
+        def __del__(self) -> None:
+            self.close()
+
+        # Need to trap __exit__ as well to ensure the file gets
+        # deleted when used in a with statement
+        def __exit__(self, exc: _Type[BaseException], value: BaseException,
+                     tb: _Optional[_TracebackType]) -> bool:
+            result = self.file.__exit__(exc, value, tb)
+            self.close()
+            return result
+    else:
+        def __exit__(self, exc: _Type[BaseException], value: BaseException,
+                     tb: _Optional[_TracebackType]) -> bool:
+            self.file.__exit__(exc, value, tb)
+
+
+def NamedTemporaryFile(mode: str = 'w+b', buffering: int = -1,
+                       encoding: str = None, newline: str = None,
+                       suffix: str = "", prefix: str = template,
+                       dir: str = None, delete: bool = True) -> _IO[_Any]:
+    """Create and return a temporary file.
+    Arguments:
+    'prefix', 'suffix', 'dir' -- as for mkstemp.
+    'mode' -- the mode argument to io.open (default "w+b").
+    'buffering' -- the buffer size argument to io.open (default -1).
+    'encoding' -- the encoding argument to io.open (default None)
+    'newline' -- the newline argument to io.open (default None)
+    'delete' -- whether the file is deleted on close (default True).
+    The file is created as mkstemp() would do it.
+
+    Returns an object with a file-like interface; the name of the file
+    is accessible as file.name.  The file will be automatically deleted
+    when it is closed unless the 'delete' argument is set to False.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    flags = _bin_openflags
+
+    # Setting O_TEMPORARY in the flags causes the OS to delete
+    # the file when it is closed.  This is only supported by Windows.
+    if _os.name == 'nt' and delete:
+        flags |= _os.O_TEMPORARY
+
+    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
+    file = _io.open(fd, mode, buffering=buffering,
+                    newline=newline, encoding=encoding)
+
+    return _cast(_IO[_Any], _TemporaryFileWrapper(file, name, delete))
+
+if _os.name != 'posix' or _sys.platform == 'cygwin':
+    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
+    # while it is open.
+    TemporaryFile = NamedTemporaryFile
+
+else:
+    def _TemporaryFile(mode: str = 'w+b', buffering: int = -1,
+                       encoding: str = None, newline: str = None,
+                       suffix: str = "", prefix: str = template,
+                       dir: str = None, delete: bool = True) -> _IO[_Any]:
+        """Create and return a temporary file.
+        Arguments:
+        'prefix', 'suffix', 'dir' -- as for mkstemp.
+        'mode' -- the mode argument to io.open (default "w+b").
+        'buffering' -- the buffer size argument to io.open (default -1).
+        'encoding' -- the encoding argument to io.open (default None)
+        'newline' -- the newline argument to io.open (default None)
+        The file is created as mkstemp() would do it.
+
+        Returns an object with a file-like interface.  The file has no
+        name, and will cease to exist when it is closed.
+        """
+
+        if dir is None:
+            dir = gettempdir()
+
+        flags = _bin_openflags
+
+        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
+        try:
+            _os.unlink(name)
+            return _io.open(fd, mode, buffering=buffering,
+                            newline=newline, encoding=encoding)
+        except:
+            _os.close(fd)
+            raise
+    TemporaryFile = _TemporaryFile
+
+class SpooledTemporaryFile:
+    """Temporary file wrapper, specialized to switch from
+    StringIO to a real file when it exceeds a certain size or
+    when a fileno is needed.
+    """
+    _rolled = False
+    _file = None  # type: _Any   # BytesIO, StringIO or TemporaryFile
+
+    def __init__(self, max_size: int = 0, mode: str = 'w+b',
+                 buffering: int = -1, encoding: str = None,
+                 newline: str = None, suffix: str = "",
+                 prefix: str = template, dir: str = None) -> None:
+        if 'b' in mode:
+            self._file = _io.BytesIO()
+        else:
+            # Setting newline="\n" avoids newline translation;
+            # this is important because otherwise on Windows we'd
+            # hget double newline translation upon rollover().
+            self._file = _io.StringIO(newline="\n")
+        self._max_size = max_size
+        self._rolled = False
+        self._TemporaryFileArgs = {
+                                   'mode': mode, 'buffering': buffering,
+                                   'suffix': suffix, 'prefix': prefix,
+                                   'encoding': encoding, 'newline': newline,
+                                   'dir': dir} # type: _Dict[str, _Any]
+
+    def _check(self, file: _IO[_Any]) -> None:
+        if self._rolled: return
+        max_size = self._max_size
+        if max_size and file.tell() > max_size:
+            self.rollover()
+
+    def rollover(self) -> None:
+        if self._rolled: return
+        file = self._file
+        newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
+        self._TemporaryFileArgs = None
+
+        newfile.write(file.getvalue())
+        newfile.seek(file.tell(), 0)
+
+        self._rolled = True
+
+    # The method caching trick from NamedTemporaryFile
+    # won't work here, because _file may change from a
+    # _StringIO instance to a real file. So we list
+    # all the methods directly.
+
+    # Context management protocol
+    def __enter__(self) -> 'SpooledTemporaryFile':
+        if self._file.closed:
+            raise ValueError("Cannot enter context with closed file")
+        return self
+
+    def __exit__(self, exc: type, value: BaseException,
+                 tb: _TracebackType) -> bool:
+        self._file.close()
+
+    # file protocol
+    def __iter__(self) -> _Iterable[_Any]:
+        return self._file.__iter__()
+
+    def close(self) -> None:
+        self._file.close()
+
+    @property
+    def closed(self) -> bool:
+        return self._file.closed
+
+    @property
+    def encoding(self) -> str:
+        return self._file.encoding
+
+    def fileno(self) -> int:
+        self.rollover()
+        return self._file.fileno()
+
+    def flush(self) -> None:
+        self._file.flush()
+
+    def isatty(self) -> bool:
+        return self._file.isatty()
+
+    @property
+    def mode(self) -> str:
+        return self._file.mode
+
+    @property
+    def name(self) -> str:
+        return self._file.name
+
+    @property
+    def newlines(self) -> _Any:
+        return self._file.newlines
+
+    #def next(self):
+    #    return self._file.next
+
+    def read(self, n: int = -1) -> _Any:
+        return self._file.read(n)
+
+    def readline(self, limit: int = -1) -> _Any:
+        return self._file.readline(limit)
+
+    def readlines(self, *args) -> _List[_Any]:
+        return self._file.readlines(*args)
+
+    def seek(self, offset: int, whence: int = 0) -> None:
+        self._file.seek(offset, whence)
+
+    @property
+    def softspace(self) -> bool:
+        return self._file.softspace
+
+    def tell(self) -> int:
+        return self._file.tell()
+
+    def truncate(self) -> None:
+        self._file.truncate()
+
+    def write(self, s: _Any) -> int:
+        file = self._file # type: _IO[_Any]
+        rv = file.write(s)
+        self._check(file)
+        return rv
+
+    def writelines(self, iterable: _Iterable[_Any]) -> None:
+        file = self._file # type: _IO[_Any]
+        file.writelines(iterable)
+        self._check(file)
+
+    #def xreadlines(self, *args) -> _Any:
+    #    return self._file.xreadlines(*args)
+
+
+class TemporaryDirectory(object):
+    """Create and return a temporary directory.  This has the same
+    behavior as mkdtemp but can be used as a context manager.  For
+    example:
+
+        with TemporaryDirectory() as tmpdir:
+            ...
+
+    Upon exiting the context, the directory and everthing contained
+    in it are removed.
+    """
+
+    def __init__(self, suffix: str = "", prefix: str = template,
+                 dir: str = None) -> None:
+        self._closed = False
+        self.name = None # type: str # Handle mkdtemp throwing an exception
+        self.name = mkdtemp(suffix, prefix, dir)
+
+        # XXX (ncoghlan): The following code attempts to make
+        # this class tolerant of the module nulling out process
+        # that happens during CPython interpreter shutdown
+        # Alas, it doesn't actually manage it. See issue #10188
+        self._listdir = _os.listdir
+        self._path_join = _os.path.join
+        self._isdir = _os.path.isdir
+        self._islink = _os.path.islink
+        self._remove = _os.remove
+        self._rmdir = _os.rmdir
+        self._os_error = _os.error
+        self._warn = _warnings.warn
+
+    def __repr__(self) -> str:
+        return "<{} {!r}>".format(self.__class__.__name__, self.name)
+
+    def __enter__(self) -> str:
+        return self.name
+
+    def cleanup(self, _warn: bool = False) -> None:
+        if self.name and not self._closed:
+            try:
+                self._rmtree(self.name)
+            except (TypeError, AttributeError) as ex:
+                # Issue #10188: Emit a warning on stderr
+                # if the directory could not be cleaned
+                # up due to missing globals
+                if "None" not in str(ex):
+                    raise
+                print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
+                      file=_sys.stderr)
+                return
+            self._closed = True
+            if _warn:
+                self._warn("Implicitly cleaning up {!r}".format(self),
+                           ResourceWarning)
+
+    def __exit__(self, exc: type, value: BaseException,
+                 tb: _TracebackType) -> bool:
+        self.cleanup()
+
+    def __del__(self) -> None:
+        # Issue a ResourceWarning if implicit cleanup needed
+        self.cleanup(_warn=True)
+
+    def _rmtree(self, path: str) -> None:
+        # Essentially a stripped down version of shutil.rmtree.  We can't
+        # use globals because they may be None'ed out at shutdown.
+        for name in self._listdir(path):
+            fullname = self._path_join(path, name)
+            try:
+                isdir = self._isdir(fullname) and not self._islink(fullname)
+            except self._os_error:
+                isdir = False
+            if isdir:
+                self._rmtree(fullname)
+            else:
+                try:
+                    self._remove(fullname)
+                except self._os_error:
+                    pass
+        try:
+            self._rmdir(path)
+        except self._os_error:
+            pass
diff --git a/typeshed/stdlib/3.3/xml/etree/__init__.pyi b/test-data/stdlib-samples/3.2/test/__init__.py
similarity index 100%
rename from typeshed/stdlib/3.3/xml/etree/__init__.pyi
rename to test-data/stdlib-samples/3.2/test/__init__.py
diff --git a/test-data/stdlib-samples/3.2/test/randv2_32.pck b/test-data/stdlib-samples/3.2/test/randv2_32.pck
new file mode 100644
index 0000000..587ab24
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv2_32.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I2
+(I-2147483648
+I-845974985
+I-1294090086
+I1193659239
+I-1849481736
+I-946579732
+I-34406770
+I1749049471
+I1997774682
+I1432026457
+I1288127073
+I-943175655
+I-1718073964
+I339993548
+I-1045260575
+I582505037
+I-1555108250
+I-1114765620
+I1578648750
+I-350384412
+I-20845848
+I-288255314
+I738790953
+I1901249641
+I1999324672
+I-277361068
+I-1515885839
+I2061761596
+I-809068089
+I1287981136
+I258129492
+I-6303745
+I-765148337
+I1090344911
+I1653434703
+I-1242923628
+I1639171313
+I-1870042660
+I-1655014050
+I345609048
+I2093410138
+I1963263374
+I-2122098342
+I1336859961
+I-810942729
+I945857753
+I2103049942
+I623922684
+I1418349549
+I690877342
+I754973107
+I-1605111847
+I1607137813
+I-1704917131
+I1317536428
+I1714882872
+I-1665385120
+I1823694397
+I-1790836866
+I-1696724812
+I-603979847
+I-498599394
+I-341265291
+I927388804
+I1778562135
+I1716895781
+I1023198122
+I1726145967
+I941955525
+I1240148950
+I-1929634545
+I-1288147083
+I-519318335
+I754559777
+I-707571958
+I374604022
+I420424061
+I-1095443486
+I1621934944
+I-1220502522
+I-140049608
+I-918917122
+I304341024
+I-1637446057
+I-353934485
+I1973436235
+I433380241
+I-686759465
+I-2111563154
+I-573422032
+I804304541
+I1513063483
+I1417381689
+I-804778729
+I211756408
+I544537322
+I890881641
+I150378374
+I1765739392
+I1011604116
+I584889095
+I1400520554
+I413747808
+I-1741992587
+I-1882421574
+I-1373001903
+I-1885348538
+I903819480
+I1083220038
+I-1318105424
+I1740421404
+I1693089625
+I775965557
+I1319608037
+I-2127475785
+I-367562895
+I-1416273451
+I1693000327
+I-1217438421
+I834405522
+I-128287275
+I864057548
+I-973917356
+I7304111
+I1712253182
+I1353897741
+I672982288
+I1778575559
+I-403058377
+I-38540378
+I-1393713496
+I13193171
+I1127196200
+I205176472
+I-2104790506
+I299985416
+I1403541685
+I-1018270667
+I-1980677490
+I-1182625797
+I1637015181
+I-1795357414
+I1514413405
+I-924516237
+I-1841873650
+I-1014591269
+I1576616065
+I-1319103135
+I-120847840
+I2062259778
+I-9285070
+I1160890300
+I-575137313
+I-1509108275
+I46701926
+I-287560914
+I-256824960
+I577558250
+I900598310
+I944607867
+I2121154920
+I-1170505192
+I-1347170575
+I77247778
+I-1899015765
+I1234103327
+I1027053658
+I1934632322
+I-792031234
+I1147322536
+I1290655117
+I1002059715
+I1325898538
+I896029793
+I-790940694
+I-980470721
+I-1922648255
+I-951672814
+I291543943
+I1158740218
+I-1959023736
+I-1977185236
+I1527900076
+I514104195
+I-814154113
+I-593157883
+I-1023704660
+I1285688377
+I-2117525386
+I768954360
+I-38676846
+I-799848659
+I-1305517259
+I-1938213641
+I-462146758
+I-1663302892
+I1899591069
+I-22935388
+I-275856976
+I-443736893
+I-739441156
+I93862068
+I-838105669
+I1735629845
+I-817484206
+I280814555
+I1753547179
+I1811123479
+I1974543632
+I-48447465
+I-642694345
+I-531149613
+I518698953
+I-221642627
+I-686519187
+I776644303
+I257774400
+I-1499134857
+I-1055273455
+I-237023943
+I1981752330
+I-917671662
+I-372905983
+I1588058420
+I1171936660
+I-1730977121
+I1360028989
+I1769469287
+I1910709542
+I-852692959
+I1396944667
+I-1723999155
+I-310975435
+I-1965453954
+I-1636858570
+I2005650794
+I680293715
+I1355629386
+I844514684
+I-1909152807
+I-808646074
+I1936510018
+I1134413810
+I-143411047
+I-1478436304
+I1394969244
+I-1170110660
+I1963112086
+I-1518351049
+I-1506287443
+I-455023090
+I-855366028
+I-1746785568
+I933990882
+I-703625141
+I-285036872
+I188277905
+I1471578620
+I-981382835
+I-586974220
+I945619758
+I1608778444
+I-1708548066
+I-1897629320
+I-42617810
+I-836840790
+I539154487
+I-235706962
+I332074418
+I-575700589
+I1534608003
+I632116560
+I-1819760653
+I642052958
+I-722391771
+I-1104719475
+I-1196847084
+I582413973
+I1563394876
+I642007944
+I108989456
+I361625014
+I677308625
+I-1806529496
+I-959050708
+I-1858251070
+I-216069832
+I701624579
+I501238033
+I12287030
+I1895107107
+I2089098638
+I-874806230
+I1236279203
+I563718890
+I-544352489
+I-1879707498
+I1767583393
+I-1776604656
+I-693294301
+I-88882831
+I169303357
+I1299196152
+I-1122791089
+I-379157172
+I1934671851
+I1575736961
+I-19573174
+I-1401511009
+I9305167
+I-1115174467
+I1670735537
+I1226436501
+I-2004524535
+I1767463878
+I-1722855079
+I-559413926
+I1529810851
+I1201272087
+I-1297130971
+I-1188149982
+I1396557188
+I-370358342
+I-1006619702
+I1600942463
+I906087130
+I-76991909
+I2069580179
+I-1674195181
+I-2098404729
+I-940972459
+I-573399187
+I-1930386277
+I-721311199
+I-647834744
+I1452181671
+I688681916
+I1812793731
+I1704380620
+I-1389615179
+I866287837
+I-1435265007
+I388400782
+I-147986600
+I-1613598851
+I-1040347408
+I782063323
+I-239282031
+I-575966722
+I-1865208174
+I-481365146
+I579572803
+I-1239481494
+I335361280
+I-429722947
+I1881772789
+I1908103808
+I1653690013
+I-1668588344
+I1933787953
+I-2033480609
+I22162797
+I-1516527040
+I-461232482
+I-16201372
+I-2043092030
+I114990337
+I-1524090084
+I1456374020
+I458606440
+I-1928083218
+I227773125
+I-1129028159
+I1678689
+I1575896907
+I-1792935220
+I-151387575
+I64084088
+I-95737215
+I1337335688
+I-1963466345
+I1243315130
+I-1798518411
+I-546013212
+I-607065396
+I1219824160
+I1715218469
+I-1368163783
+I1701552913
+I-381114888
+I1068821717
+I266062971
+I-2066513172
+I1767407229
+I-780936414
+I-705413443
+I-1256268847
+I1646874149
+I1107690353
+I839133072
+I67001749
+I860763503
+I884880613
+I91977084
+I755371933
+I420745153
+I-578480690
+I-1520193551
+I1011369331
+I-99754575
+I-733141064
+I-500598588
+I1081124271
+I-1341266575
+I921002612
+I-848852487
+I-1904467341
+I-1294256973
+I-94074714
+I-1778758498
+I-1401188547
+I2101830578
+I2058864877
+I-272875991
+I-1375854779
+I-1332937870
+I619425525
+I-1034529639
+I-36454393
+I-2030499985
+I-1637127500
+I-1408110287
+I-2108625749
+I-961007436
+I1475654951
+I-791946251
+I1667792115
+I1818978830
+I1897980514
+I1959546477
+I-74478911
+I-508643347
+I461594399
+I538802715
+I-2094970071
+I-2076660253
+I1091358944
+I1944029246
+I-343957436
+I-1915845022
+I1237620188
+I1144125174
+I1522190520
+I-670252952
+I-19469226
+I675626510
+I758750096
+I909724354
+I-1846259652
+I544669343
+I445182495
+I-821519930
+I-1124279685
+I-1668995122
+I1653284793
+I-678555151
+I-687513207
+I1558259445
+I-1978866839
+I1558835601
+I1732138472
+I-1904793363
+I620020296
+I1562597874
+I1942617227
+I-549632552
+I721603795
+I417978456
+I-1355281522
+I-538065208
+I-1079523196
+I187375699
+I449064972
+I1018083947
+I1632388882
+I-493269866
+I92769041
+I1477146750
+I1782708404
+I444873376
+I1085851104
+I-6823272
+I-1302251853
+I1602050688
+I-1042187824
+I287161745
+I-1972094479
+I103271491
+I2131619773
+I-2064115870
+I766815498
+I990861458
+I-1664407378
+I1083746756
+I-1018331904
+I-677315687
+I-951670647
+I-952356874
+I451460609
+I-818615564
+I851439508
+I656362634
+I-1351240485
+I823378078
+I1985597385
+I597757740
+I-1512303057
+I1590872798
+I1108424213
+I818850898
+I-1368594306
+I-201107761
+I1793370378
+I1247597611
+I-1594326264
+I-601653890
+I427642759
+I248322113
+I-292545338
+I1708985870
+I1917042771
+I429354503
+I-478470329
+I793960014
+I369939133
+I1728189157
+I-518963626
+I-278523974
+I-1877289696
+I-2088617658
+I-1367940049
+I-62295925
+I197975119
+I-252900777
+I803430539
+I485759441
+I-528283480
+I-1287443963
+I-478617444
+I-861906946
+I-649095555
+I-893184337
+I2050571322
+I803433133
+I1629574571
+I1649720417
+I-2050225209
+I1208598977
+I720314344
+I-615166251
+I-835077127
+I-1405372429
+I995698064
+I148123240
+I-943016676
+I-594609622
+I-1381596711
+I1017195301
+I-1268893013
+I-1815985179
+I-1393570351
+I-870027364
+I-476064472
+I185582645
+I569863326
+I1098584267
+I-1599147006
+I-485054391
+I-852098365
+I1477320135
+I222316762
+I-1515583064
+I-935051367
+I393383063
+I819617226
+I722921837
+I-1241806499
+I-1358566385
+I1666813591
+I1333875114
+I-1663688317
+I-47254623
+I-885800726
+I307388991
+I-1219459496
+I1374870300
+I2132047877
+I-1385624198
+I-245139206
+I1015139214
+I-926198559
+I1969798868
+I-1950480619
+I-559193432
+I-1256446518
+I-1983476981
+I790179655
+I1004289659
+I1541827617
+I1555805575
+I501127333
+I-1123446797
+I-453230915
+I2035104883
+I1296122398
+I-1843698604
+I-715464588
+I337143971
+I-1972119192
+I606777909
+I726977302
+I-1149501872
+I-1963733522
+I-1797504644
+I624
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv2_64.pck b/test-data/stdlib-samples/3.2/test/randv2_64.pck
new file mode 100644
index 0000000..090dd6f
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv2_64.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I2
+(I2147483648
+I1812115682
+I2741755497
+I1028055730
+I809166036
+I2773628650
+I62321950
+I535290043
+I349877800
+I976167039
+I2490696940
+I3631326955
+I2107991114
+I2941205793
+I3199611605
+I1871971556
+I1456108540
+I2984591044
+I140836801
+I4203227310
+I3652722980
+I4031971234
+I555769760
+I697301296
+I2347638880
+I3302335858
+I320255162
+I2553586608
+I1570224361
+I2838780912
+I2315834918
+I2351348158
+I3545433015
+I2292018579
+I1177569331
+I758497559
+I2913311175
+I1014948880
+I1793619243
+I3982451053
+I3850988342
+I2393984324
+I1583100093
+I3144742543
+I3655047493
+I3507532385
+I3094515442
+I350042434
+I2455294844
+I1038739312
+I313809152
+I189433072
+I1653165452
+I4186650593
+I19281455
+I2589680619
+I4145931590
+I4283266118
+I636283172
+I943618337
+I3170184633
+I2308766231
+I634615159
+I538152647
+I2079576891
+I1029442616
+I3410689412
+I1370292761
+I1071718978
+I2139496322
+I1876699543
+I3485866187
+I3157490130
+I1633105386
+I1453253160
+I3841322080
+I3789608924
+I4110770792
+I95083673
+I931354627
+I2065389591
+I3448339827
+I3348204577
+I3263528560
+I2411324590
+I4003055026
+I1869670093
+I2737231843
+I4150701155
+I2689667621
+I2993263224
+I3239890140
+I1191430483
+I1214399779
+I3623428533
+I1817058866
+I3052274451
+I326030082
+I1505129312
+I2306812262
+I1349150363
+I1099127895
+I2543465574
+I2396380193
+I503926466
+I1607109730
+I3451716817
+I58037114
+I4290081119
+I947517597
+I3083440186
+I520522630
+I2948962496
+I4184319574
+I2957636335
+I668374201
+I2325446473
+I472785314
+I3791932366
+I573017189
+I2185725379
+I1262251492
+I3525089379
+I2951262653
+I1305347305
+I940958122
+I3343754566
+I359371744
+I3874044973
+I396897232
+I147188248
+I716683703
+I4013880315
+I1133359586
+I1794612249
+I3480815192
+I3988787804
+I1729355809
+I573408542
+I1419310934
+I1770030447
+I3552845567
+I1693976502
+I1271189893
+I2298236738
+I2049219027
+I3464198070
+I1233574082
+I1007451781
+I1838253750
+I687096593
+I1131375603
+I1223013895
+I1490478435
+I339265439
+I4232792659
+I491538536
+I2816256769
+I1044097522
+I2566227049
+I748762793
+I1511830494
+I3593259822
+I4121279213
+I3735541309
+I3609794797
+I1939942331
+I377570434
+I1437957554
+I1831285696
+I55062811
+I2046783110
+I1303902283
+I1838349877
+I420993556
+I1256392560
+I2795216506
+I2783687924
+I3322303169
+I512794749
+I308405826
+I517164429
+I3320436022
+I1328403632
+I2269184746
+I3729522810
+I3304314450
+I2238756124
+I1690581361
+I3813277532
+I4119706879
+I2659447875
+I388818978
+I2064580814
+I1586227676
+I2627522685
+I2017792269
+I547928109
+I859107450
+I1062238929
+I858886237
+I3795783146
+I4173914756
+I3835915965
+I3329504821
+I3494579904
+I838863205
+I3399734724
+I4247387481
+I3618414834
+I2984433798
+I2165205561
+I4260685684
+I3045904244
+I3450093836
+I3597307595
+I3215851166
+I3162801328
+I2558283799
+I950068105
+I1829664117
+I3108542987
+I2378860527
+I790023460
+I280087750
+I1171478018
+I2333653728
+I3976932140
+I896746152
+I1802494195
+I1232873794
+I2749440836
+I2032037296
+I2012091682
+I1296131034
+I3892133385
+I908161334
+I2296791795
+I548169794
+I696265
+I893156828
+I426904709
+I3565374535
+I2655906825
+I2792178515
+I2406814632
+I4038847579
+I3123934642
+I2197503004
+I3535032597
+I2266216689
+I2117613462
+I1787448518
+I1875089416
+I2037165384
+I1140676321
+I3606296464
+I3229138231
+I2458267132
+I1874651171
+I3331900867
+I1000557654
+I1432861701
+I473636323
+I2691783927
+I1871437447
+I1328016401
+I4118690062
+I449467602
+I681789035
+I864889442
+I1200888928
+I75769445
+I4008690037
+I2464577667
+I4167795823
+I3070097648
+I2579174882
+I1216886568
+I3810116343
+I2249507485
+I3266903480
+I3671233480
+I100191658
+I3087121334
+I365063087
+I3821275176
+I2165052848
+I1282465245
+I3601570637
+I3132413236
+I2780570459
+I3222142917
+I3129794692
+I2611590811
+I947031677
+I2991908938
+I750997949
+I3632575131
+I1632014461
+I2846484755
+I2347261779
+I2903959448
+I1397316686
+I1904578392
+I774649578
+I3164598558
+I2429587609
+I738244516
+I1563304975
+I1399317414
+I1021316297
+I3187933234
+I2126780757
+I4011907847
+I4095169219
+I3358010054
+I2729978247
+I3736811646
+I3009656410
+I2893043637
+I4027447385
+I1239610110
+I1488806900
+I2674866844
+I442876374
+I2853687260
+I2785921005
+I3151378528
+I1180567
+I2803146964
+I982221759
+I2192919417
+I3087026181
+I2480838002
+I738452921
+I687986185
+I3049371676
+I3636492954
+I3468311299
+I2379621102
+I788988633
+I1643210601
+I2983998168
+I2492730801
+I2586048705
+I604073029
+I4121082815
+I1496476928
+I2972357110
+I2663116968
+I2642628592
+I2116052039
+I487186279
+I2577680328
+I3974766614
+I730776636
+I3842528855
+I1929093695
+I44626622
+I3989908833
+I1695426222
+I3675479382
+I3051784964
+I1514876613
+I1254036595
+I2420450649
+I3034377361
+I2332990590
+I1535175126
+I185834384
+I1107372900
+I1707278185
+I1286285295
+I3332574225
+I2785672437
+I883170645
+I2005666473
+I3403131327
+I4122021352
+I1464032858
+I3702576112
+I260554598
+I1837731650
+I2594435345
+I75771049
+I2012484289
+I3058649775
+I29979703
+I3861335335
+I2506495152
+I3786448704
+I442947790
+I2582724774
+I4291336243
+I2568189843
+I1923072690
+I1121589611
+I837696302
+I3284631720
+I3865021324
+I3576453165
+I2559531629
+I1459231762
+I3506550036
+I3754420159
+I2622000757
+I124228596
+I1084328605
+I1692830753
+I547273558
+I674282621
+I655259103
+I3188629610
+I490502174
+I2081001293
+I3191330704
+I4109943593
+I1859948504
+I3163806460
+I508833168
+I1256371033
+I2709253790
+I2068956572
+I3092842814
+I3913926529
+I2039638759
+I981982529
+I536094190
+I368855295
+I51993975
+I1597480732
+I4058175522
+I2155896702
+I3196251991
+I1081913893
+I3952353788
+I3545548108
+I2370669647
+I2206572308
+I2576392991
+I1732303374
+I1153136290
+I537641955
+I1738691747
+I3232854186
+I2539632206
+I2829760278
+I3058187853
+I1202425792
+I3762361970
+I2863949342
+I2640635867
+I376638744
+I1857679757
+I330798087
+I1457400505
+I1135610046
+I606400715
+I1859536026
+I509811335
+I529772308
+I2579273244
+I1890382004
+I3959908876
+I2612335971
+I2834052227
+I1434475986
+I3684202717
+I4015011345
+I582567852
+I3689969571
+I3934753460
+I3034960691
+I208573292
+I4004113742
+I3992904842
+I2587153719
+I3529179079
+I1565424987
+I779130678
+I1048582935
+I3213591622
+I3607793434
+I3951254937
+I2047811901
+I7508850
+I248544605
+I4210090324
+I2331490884
+I70057213
+I776474945
+I1345528889
+I3290403612
+I1664955269
+I1533143116
+I545003424
+I4141564478
+I1257326139
+I868843601
+I2337603029
+I1918131449
+I1843439523
+I1125519035
+I673340118
+I421408852
+I1520454906
+I1804722630
+I3621254196
+I2329968000
+I39464672
+I430583134
+I294026512
+I53978525
+I2892276105
+I1418863764
+I3419054451
+I1391595797
+I3544981798
+I4191780858
+I825672357
+I2972000844
+I1571305069
+I4231982845
+I3611916419
+I3045163168
+I2982349733
+I278572141
+I4215338078
+I839860504
+I1819151779
+I1412347479
+I1386770353
+I3914589491
+I3783104977
+I4124296733
+I830546258
+I89825624
+I4110601328
+I2545483429
+I300600527
+I516641158
+I3693021034
+I2852912854
+I3240039868
+I4167407959
+I1479557946
+I3621188804
+I1391590944
+I3578441128
+I1227055556
+I406898396
+I3064054983
+I25835338
+I402664165
+I4097682779
+I2106728012
+I203613622
+I3045467686
+I1381726438
+I3798670110
+I1342314961
+I3552497361
+I535913619
+I2625787583
+I1606574307
+I1101269630
+I1950513752
+I1121355862
+I3586816903
+I438529984
+I2473182121
+I1229997203
+I405445940
+I1695535315
+I427014336
+I3916768430
+I392298359
+I1884642868
+I1244730821
+I741058080
+I567479957
+I3527621168
+I3191971011
+I3267069104
+I4108668146
+I1520795587
+I166581006
+I473794477
+I1562126550
+I929843010
+I889533294
+I1266556608
+I874518650
+I3520162092
+I3013765049
+I4220231414
+I547246449
+I3998093769
+I3737193746
+I3872944207
+I793651876
+I2606384318
+I875991012
+I1394836334
+I4102011644
+I854380426
+I2618666767
+I2568302000
+I1995512132
+I229491093
+I2673500286
+I3364550739
+I3836923416
+I243656987
+I3944388983
+I4064949677
+I1416956378
+I1703244487
+I3990798829
+I2023425781
+I3926702214
+I1229015501
+I3174247824
+I624
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/randv3.pck b/test-data/stdlib-samples/3.2/test/randv3.pck
new file mode 100644
index 0000000..09fc38b
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/randv3.pck
@@ -0,0 +1,633 @@
+crandom
+Random
+p0
+(tRp1
+(I3
+(L2147483648L
+L994081831L
+L2806287265L
+L2228999830L
+L3396498069L
+L2956805457L
+L3273927761L
+L920726507L
+L1862624492L
+L2921292485L
+L1779526843L
+L2469105503L
+L251696293L
+L1254390717L
+L779197080L
+L3165356830L
+L2007365218L
+L1870028812L
+L2896519363L
+L1855578438L
+L979518416L
+L3481710246L
+L3191861507L
+L3993006593L
+L2967971479L
+L3353342753L
+L3576782572L
+L339685558L
+L2367675732L
+L116208555L
+L1220054437L
+L486597056L
+L1912115141L
+L1037044792L
+L4096904723L
+L3409146175L
+L3701651227L
+L315824610L
+L4138604583L
+L1385764892L
+L191878900L
+L2320582219L
+L3420677494L
+L2776503169L
+L1148247403L
+L829555069L
+L902064012L
+L2934642741L
+L2477108577L
+L2583928217L
+L1658612579L
+L2865447913L
+L129147346L
+L3691171887L
+L1569328110L
+L1372860143L
+L1054139183L
+L1617707080L
+L69020592L
+L3810271603L
+L1853953416L
+L3499803073L
+L1027545027L
+L3229043605L
+L250848720L
+L3324932626L
+L3537002962L
+L2494323345L
+L3238103962L
+L4147541579L
+L3636348186L
+L3025455083L
+L2678771977L
+L584700256L
+L3461826909L
+L854511420L
+L943463552L
+L3609239025L
+L3977577989L
+L253070090L
+L777394544L
+L2144086567L
+L1092947992L
+L854327284L
+L2222750082L
+L360183510L
+L1312466483L
+L3227531091L
+L2235022500L
+L3013060530L
+L2541091298L
+L3480126342L
+L1839762775L
+L2632608190L
+L1108889403L
+L3045050923L
+L731513126L
+L3505436788L
+L3062762017L
+L1667392680L
+L1354126500L
+L1143573930L
+L2816645702L
+L2100356873L
+L2817679106L
+L1210746010L
+L2409915248L
+L2910119964L
+L2309001420L
+L220351824L
+L3667352871L
+L3993148590L
+L2886160232L
+L4239393701L
+L1189270581L
+L3067985541L
+L147374573L
+L2355164869L
+L3696013550L
+L4227037846L
+L1905112743L
+L3312843689L
+L2930678266L
+L1828795355L
+L76933594L
+L3987100796L
+L1288361435L
+L3464529151L
+L965498079L
+L1444623093L
+L1372893415L
+L1536235597L
+L1341994850L
+L963594758L
+L2115295754L
+L982098685L
+L1053433904L
+L2078469844L
+L3059765792L
+L1753606181L
+L2130171254L
+L567588194L
+L529629426L
+L3621523534L
+L3027576564L
+L1176438083L
+L4096287858L
+L1168574683L
+L1425058962L
+L1429631655L
+L2902106759L
+L761900641L
+L1329183956L
+L1947050932L
+L447490289L
+L3282516276L
+L200037389L
+L921868197L
+L3331403999L
+L4088760249L
+L2188326318L
+L288401961L
+L1360802675L
+L314302808L
+L3314639210L
+L3749821203L
+L2286081570L
+L2768939062L
+L3200541016L
+L2133495482L
+L385029880L
+L4217232202L
+L3171617231L
+L1660846653L
+L2459987621L
+L2691776124L
+L4225030408L
+L3595396773L
+L1103680661L
+L539064057L
+L1492841101L
+L166195394L
+L757973658L
+L533893054L
+L2784879594L
+L1021821883L
+L2350548162L
+L176852116L
+L3503166025L
+L148079914L
+L1633466236L
+L2773090165L
+L1162846701L
+L3575737795L
+L1624178239L
+L2454894710L
+L3014691938L
+L526355679L
+L1870824081L
+L3362425857L
+L3907566665L
+L3462563184L
+L2229112004L
+L4203735748L
+L1557442481L
+L924133999L
+L1906634214L
+L880459727L
+L4065895870L
+L141426254L
+L1258450159L
+L3243115027L
+L1574958840L
+L313939294L
+L3055664260L
+L3459714255L
+L531778790L
+L509505506L
+L1620227491L
+L2675554942L
+L2516509560L
+L3797299887L
+L237135890L
+L3203142213L
+L1087745310L
+L1897151854L
+L3936590041L
+L132765167L
+L2385908063L
+L1360600289L
+L3574567769L
+L2752788114L
+L2644228966L
+L2377705183L
+L601277909L
+L4046480498L
+L324401408L
+L3279931760L
+L2227059377L
+L1538827493L
+L4220532064L
+L478044564L
+L2917117761L
+L635492832L
+L2319763261L
+L795944206L
+L1820473234L
+L1673151409L
+L1404095402L
+L1661067505L
+L3217106938L
+L2406310683L
+L1931309248L
+L2458622868L
+L3323670524L
+L3266852755L
+L240083943L
+L3168387397L
+L607722198L
+L1256837690L
+L3608124913L
+L4244969357L
+L1289959293L
+L519750328L
+L3229482463L
+L1105196988L
+L1832684479L
+L3761037224L
+L2363631822L
+L3297957711L
+L572766355L
+L1195822137L
+L2239207981L
+L2034241203L
+L163540514L
+L288160255L
+L716403680L
+L4019439143L
+L1536281935L
+L2345100458L
+L2786059178L
+L2822232109L
+L987025395L
+L3061166559L
+L490422513L
+L2551030115L
+L2638707620L
+L1344728502L
+L714108911L
+L2831719700L
+L2188615369L
+L373509061L
+L1351077504L
+L3136217056L
+L783521095L
+L2554949468L
+L2662499550L
+L1203826951L
+L1379632388L
+L1918858985L
+L607465976L
+L1980450237L
+L3540079211L
+L3397813410L
+L2913309266L
+L2289572621L
+L4133935327L
+L4166227663L
+L3371801704L
+L3065474909L
+L3580562343L
+L3832172378L
+L2556130719L
+L310473705L
+L3734014346L
+L2490413810L
+L347233056L
+L526668037L
+L1158393656L
+L544329703L
+L2150085419L
+L3914038146L
+L1060237586L
+L4159394837L
+L113205121L
+L309966775L
+L4098784465L
+L3635222960L
+L2417516569L
+L2089579233L
+L1725807541L
+L2728122526L
+L2365836523L
+L2504078522L
+L1443946869L
+L2384171411L
+L997046534L
+L3249131657L
+L1699875986L
+L3618097146L
+L1716038224L
+L2629818607L
+L2929217876L
+L1367250314L
+L1726434951L
+L1388496325L
+L2107602181L
+L2822366842L
+L3052979190L
+L3796798633L
+L1543813381L
+L959000121L
+L1363845999L
+L2952528150L
+L874184932L
+L1888387194L
+L2328695295L
+L3442959855L
+L841805947L
+L1087739275L
+L3230005434L
+L3045399265L
+L1161817318L
+L2898673139L
+L860011094L
+L940539782L
+L1297818080L
+L4243941623L
+L1577613033L
+L4204131887L
+L3819057225L
+L1969439558L
+L3297963932L
+L241874069L
+L3517033453L
+L2295345664L
+L1098911422L
+L886955008L
+L1477397621L
+L4279347332L
+L3616558791L
+L2384411957L
+L742537731L
+L764221540L
+L2871698900L
+L3530636393L
+L691256644L
+L758730966L
+L1717773090L
+L2751856377L
+L3188484000L
+L3767469670L
+L1623863053L
+L3533236793L
+L4099284176L
+L723921107L
+L310594036L
+L223978745L
+L2266565776L
+L201843303L
+L2969968546L
+L3351170888L
+L3465113624L
+L2712246712L
+L1521383057L
+L2384461798L
+L216357551L
+L2167301975L
+L3144653194L
+L2781220155L
+L3620747666L
+L95971265L
+L4255400243L
+L59999757L
+L4174273472L
+L3974511524L
+L1007123950L
+L3112477628L
+L806461512L
+L3148074008L
+L528352882L
+L2545979588L
+L2562281969L
+L3010249477L
+L1886331611L
+L3210656433L
+L1034099976L
+L2906893579L
+L1197048779L
+L1870004401L
+L3898300490L
+L2686856402L
+L3975723478L
+L613043532L
+L2565674353L
+L3760045310L
+L3468984376L
+L4126258L
+L303855424L
+L3988963552L
+L276256796L
+L544071807L
+L1023872062L
+L1747461519L
+L1975571260L
+L4033766958L
+L2946555557L
+L1492957796L
+L958271685L
+L46480515L
+L907760635L
+L1306626357L
+L819652378L
+L1172300279L
+L1116851319L
+L495601075L
+L1157715330L
+L534220108L
+L377320028L
+L1672286106L
+L2066219284L
+L1842386355L
+L2546059464L
+L1839457336L
+L3476194446L
+L3050550028L
+L594705582L
+L1905813535L
+L1813033412L
+L2700858157L
+L169067972L
+L4252889045L
+L1921944555L
+L497671474L
+L210143935L
+L2688398489L
+L325158375L
+L3450846447L
+L891760597L
+L712802536L
+L1132557436L
+L1417044075L
+L1639889660L
+L1746379970L
+L1478741647L
+L2817563486L
+L2573612532L
+L4266444457L
+L2911601615L
+L804745411L
+L2207254652L
+L1189140646L
+L3829725111L
+L3637367348L
+L1944731747L
+L2193440343L
+L1430195413L
+L1173515229L
+L1582618217L
+L2070767037L
+L247908936L
+L1460675439L
+L556001596L
+L327629335L
+L1036133876L
+L4228129605L
+L999174048L
+L3635804039L
+L1416550481L
+L1270540269L
+L4280743815L
+L39607659L
+L1552540623L
+L2762294062L
+L504137289L
+L4117044239L
+L1417130225L
+L1342970056L
+L1755716449L
+L1169447322L
+L2731401356L
+L2319976745L
+L2869221479L
+L23972655L
+L2251495389L
+L1429860878L
+L3728135992L
+L4241432973L
+L3698275076L
+L216416432L
+L4040046960L
+L246077176L
+L894675685L
+L3932282259L
+L3097205100L
+L2128818650L
+L1319010656L
+L1601974009L
+L2552960957L
+L3554016055L
+L4209395641L
+L2013340102L
+L3370447801L
+L2307272002L
+L1795091354L
+L202109401L
+L988345070L
+L2514870758L
+L1132726850L
+L582746224L
+L3112305421L
+L1843020683L
+L3600189223L
+L1101349165L
+L4211905855L
+L2866677581L
+L2881621130L
+L4165324109L
+L4238773191L
+L3635649550L
+L2670481044L
+L2996248219L
+L1676992480L
+L3473067050L
+L4205793699L
+L4019490897L
+L1579990481L
+L1899617990L
+L1136347713L
+L1802842268L
+L3591752960L
+L1197308739L
+L433629786L
+L4032142790L
+L3148041979L
+L3312138845L
+L3896860449L
+L3298182567L
+L907605170L
+L1658664067L
+L2682980313L
+L2523523173L
+L1208722103L
+L3808530363L
+L1079003946L
+L4282402864L
+L2041010073L
+L2667555071L
+L688018180L
+L1405121012L
+L4167994076L
+L3504695336L
+L1923944749L
+L1143598790L
+L3936268898L
+L3606243846L
+L1017420080L
+L4026211169L
+L596529763L
+L1844259624L
+L2840216282L
+L2673807759L
+L3407202575L
+L2737971083L
+L4075423068L
+L3684057432L
+L3146627241L
+L599650513L
+L69773114L
+L1257035919L
+L807485291L
+L2376230687L
+L3036593147L
+L2642411658L
+L106080044L
+L2199622729L
+L291834511L
+L2697611361L
+L11689733L
+L625123952L
+L3226023062L
+L3229663265L
+L753059444L
+L2843610189L
+L624L
+tp2
+Ntp3
+b.
\ No newline at end of file
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py b/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
new file mode 100644
index 0000000..1f61e13
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/fd_status.py
@@ -0,0 +1,24 @@
+"""When called as a script, print a comma-separated list of the open
+file descriptors on stdout."""
+
+import errno
+import os
+
+try:
+    _MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    _MAXFD = 256
+
+if __name__ == "__main__":
+    fds = []
+    for fd in range(0, _MAXFD):
+        try:
+            st = os.fstat(fd)
+        except OSError as e:
+            if e.errno == errno.EBADF:
+                continue
+            raise
+        # Ignore Solaris door files
+        if st.st_mode & 0xF000 != 0xd000:
+            fds.append(fd)
+    print(','.join(map(str, fds)))
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py b/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
new file mode 100644
index 0000000..1dc3191
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/input_reader.py
@@ -0,0 +1,7 @@
+"""When called as a script, consumes the input"""
+
+import sys
+
+if __name__ == "__main__":
+    for line in sys.stdin:
+        pass
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
new file mode 100644
index 0000000..fe6f9db
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/qcat.py
@@ -0,0 +1,7 @@
+"""When ran as a script, simulates cat with no arguments."""
+
+import sys
+
+if __name__ == "__main__":
+    for line in sys.stdin:
+        sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py b/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
new file mode 100644
index 0000000..6990637
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/qgrep.py
@@ -0,0 +1,10 @@
+"""When called with a single argument, simulated fgrep with a single
+argument and no options."""
+
+import sys
+
+if __name__ == "__main__":
+    pattern = sys.argv[1]
+    for line in sys.stdin:
+        if pattern in line:
+            sys.stdout.write(line)
diff --git a/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py b/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
new file mode 100644
index 0000000..6072aec
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/subprocessdata/sigchild_ignore.py
@@ -0,0 +1,6 @@
+import signal, subprocess, sys
+# On Linux this causes os.waitpid to fail with OSError as the OS has already
+# reaped our child process.  The wait() passing the OSError on to the caller
+# and causing us to exit with an error is what we are testing against.
+signal.signal(signal.SIGCHLD, signal.SIG_IGN)
+subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait()
diff --git a/test-data/stdlib-samples/3.2/test/support.py b/test-data/stdlib-samples/3.2/test/support.py
new file mode 100644
index 0000000..a36ba28
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/support.py
@@ -0,0 +1,1602 @@
+"""Supporting definitions for the Python regression tests."""
+
+if __name__ != 'test.support':
+    raise ImportError('support must be imported from the test package')
+
+import contextlib
+import errno
+import functools
+import gc
+import socket
+import sys
+import os
+import platform
+import shutil
+import warnings
+import unittest
+import importlib
+import collections
+import re
+import subprocess
+import imp
+import time
+import sysconfig
+import fnmatch
+import logging.handlers
+
+import _thread, threading
+from typing import Any, Dict, cast
+#try:
+#    import multiprocessing.process
+#except ImportError:
+#    multiprocessing = None
+
+
+__all__ = [
+    "Error", "TestFailed", "ResourceDenied", "import_module",
+    "verbose", "use_resources", "max_memuse", "record_original_stdout",
+    "get_original_stdout", "unload", "unlink", "rmtree", "forget",
+    "is_resource_enabled", "requires", "requires_mac_ver",
+    "find_unused_port", "bind_port",
+    "fcmp", "is_jython", "TESTFN", "HOST", "FUZZ", "SAVEDCWD", "temp_cwd",
+    "findfile", "sortdict", "check_syntax_error", "open_urlresource",
+    "check_warnings", "CleanImport", "EnvironmentVarGuard",
+    "TransientResource", "captured_output", "captured_stdout",
+    "captured_stdin", "captured_stderr",
+    "time_out", "socket_peer_reset", "ioerror_peer_reset",
+    "run_with_locale", 'temp_umask', "transient_internet",
+    "set_memlimit", "bigmemtest", "bigaddrspacetest", "BasicTestRunner",
+    "run_unittest", "run_doctest", "threading_setup", "threading_cleanup",
+    "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
+    "swap_item", "swap_attr", "requires_IEEE_754",
+    "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
+    "import_fresh_module", "failfast",
+    ]
+
+class Error(Exception):
+    """Base class for regression test exceptions."""
+
+class TestFailed(Error):
+    """Test failed."""
+
+class ResourceDenied(unittest.SkipTest):
+    """Test skipped because it requested a disallowed resource.
+
+    This is raised when a test calls requires() for a resource that
+    has not been enabled.  It is used to distinguish between expected
+    and unexpected skips.
+    """
+
+ at contextlib.contextmanager
+def _ignore_deprecated_imports(ignore=True):
+    """Context manager to suppress package and module deprecation
+    warnings when importing them.
+
+    If ignore is False, this context manager has no effect."""
+    if ignore:
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", ".+ (module|package)",
+                                    DeprecationWarning)
+            yield None
+    else:
+        yield None
+
+
+def import_module(name, deprecated=False):
+    """Import and return the module to be tested, raising SkipTest if
+    it is not available.
+
+    If deprecated is True, any module or package deprecation messages
+    will be suppressed."""
+    with _ignore_deprecated_imports(deprecated):
+        try:
+            return importlib.import_module(name)
+        except ImportError as msg:
+            raise unittest.SkipTest(str(msg))
+
+
+def _save_and_remove_module(name, orig_modules):
+    """Helper function to save and remove a module from sys.modules
+
+       Raise ImportError if the module can't be imported."""
+    # try to import the module and raise an error if it can't be imported
+    if name not in sys.modules:
+        __import__(name)
+        del sys.modules[name]
+    for modname in list(sys.modules):
+        if modname == name or modname.startswith(name + '.'):
+            orig_modules[modname] = sys.modules[modname]
+            del sys.modules[modname]
+
+def _save_and_block_module(name, orig_modules):
+    """Helper function to save and block a module in sys.modules
+
+       Return True if the module was in sys.modules, False otherwise."""
+    saved = True
+    try:
+        orig_modules[name] = sys.modules[name]
+    except KeyError:
+        saved = False
+    sys.modules[name] = None
+    return saved
+
+
+def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
+    """Imports and returns a module, deliberately bypassing the sys.modules cache
+    and importing a fresh copy of the module. Once the import is complete,
+    the sys.modules cache is restored to its original state.
+
+    Modules named in fresh are also imported anew if needed by the import.
+    If one of these modules can't be imported, None is returned.
+
+    Importing of modules named in blocked is prevented while the fresh import
+    takes place.
+
+    If deprecated is True, any module or package deprecation messages
+    will be suppressed."""
+    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
+    # to make sure that this utility function is working as expected
+    with _ignore_deprecated_imports(deprecated):
+        # Keep track of modules saved for later restoration as well
+        # as those which just need a blocking entry removed
+        orig_modules = {}
+        names_to_remove = []
+        _save_and_remove_module(name, orig_modules)
+        try:
+            for fresh_name in fresh:
+                _save_and_remove_module(fresh_name, orig_modules)
+            for blocked_name in blocked:
+                if not _save_and_block_module(blocked_name, orig_modules):
+                    names_to_remove.append(blocked_name)
+            fresh_module = importlib.import_module(name)
+        except ImportError:
+            fresh_module = None
+        finally:
+            for orig_name, module in orig_modules.items():
+                sys.modules[orig_name] = module
+            for name_to_remove in names_to_remove:
+                del sys.modules[name_to_remove]
+        return fresh_module
+
+
+def get_attribute(obj, name):
+    """Get an attribute, raising SkipTest if AttributeError is raised."""
+    try:
+        attribute = getattr(obj, name)
+    except AttributeError:
+        raise unittest.SkipTest("module %s has no attribute %s" % (
+            obj.__name__, name))
+    else:
+        return attribute
+
+verbose = 1              # Flag set to 0 by regrtest.py
+use_resources = None # type: Any     # Flag set to [] by regrtest.py
+max_memuse = 0           # Disable bigmem tests (they will still be run with
+                         # small sizes, to make sure they work.)
+real_max_memuse = 0
+failfast = False
+match_tests = None # type: Any
+
+# _original_stdout is meant to hold stdout at the time regrtest began.
+# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+# The point is to have some flavor of stdout the user can actually see.
+_original_stdout = None # type: 'Any'
+def record_original_stdout(stdout):
+    global _original_stdout
+    _original_stdout = stdout
+
+def get_original_stdout():
+    return _original_stdout or sys.stdout
+
+def unload(name):
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
+
+def unlink(filename):
+    try:
+        os.unlink(filename)
+    except OSError as error:
+        # The filename need not exist.
+        if error.errno not in (errno.ENOENT, errno.ENOTDIR):
+            raise
+
+def rmtree(path):
+    try:
+        shutil.rmtree(path)
+    except OSError as error:
+        # Unix returns ENOENT, Windows returns ESRCH.
+        if error.errno not in (errno.ENOENT, errno.ESRCH):
+            raise
+
+def make_legacy_pyc(source):
+    """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location.
+
+    The choice of .pyc or .pyo extension is done based on the __debug__ flag
+    value.
+
+    :param source: The file system path to the source file.  The source file
+        does not need to exist, however the PEP 3147 pyc file must exist.
+    :return: The file system path to the legacy pyc file.
+    """
+    pyc_file = imp.cache_from_source(source)
+    up_one = os.path.dirname(os.path.abspath(source))
+    if __debug__:
+        ch = 'c'
+    else:
+        ch = 'o'
+    legacy_pyc = os.path.join(up_one, source + ch)
+    os.rename(pyc_file, legacy_pyc)
+    return legacy_pyc
+
+def forget(modname):
+    """'Forget' a module was ever imported.
+
+    This removes the module from sys.modules and deletes any PEP 3147 or
+    legacy .pyc and .pyo files.
+    """
+    unload(modname)
+    for dirname in sys.path:
+        source = os.path.join(dirname, modname + '.py')
+        # It doesn't matter if they exist or not, unlink all possible
+        # combinations of PEP 3147 and legacy pyc and pyo files.
+        unlink(source + 'c')
+        unlink(source + 'o')
+        unlink(imp.cache_from_source(source, debug_override=True))
+        unlink(imp.cache_from_source(source, debug_override=False))
+
+# On some platforms, should not run gui test even if it is allowed
+# in `use_resources'.
+#if sys.platform.startswith('win'):
+    #import ctypes
+    #import ctypes.wintypes
+    #def _is_gui_available():
+    #    UOI_FLAGS = 1
+    #    WSF_VISIBLE = 0x0001
+    #    class USEROBJECTFLAGS(ctypes.Structure):
+    #        _fields_ = [("fInherit", ctypes.wintypes.BOOL),
+    #                    ("fReserved", ctypes.wintypes.BOOL),
+    #                    ("dwFlags", ctypes.wintypes.DWORD)]
+    #    dll = ctypes.windll.user32
+    #    h = dll.GetProcessWindowStation()
+    #    if not h:
+    #        raise ctypes.WinError()
+    #    uof = USEROBJECTFLAGS()
+    #    needed = ctypes.wintypes.DWORD()
+    #    res = dll.GetUserObjectInformationW(h,
+    #        UOI_FLAGS,
+    #        ctypes.byref(uof),
+    #        ctypes.sizeof(uof),
+    #        ctypes.byref(needed))
+    #    if not res:
+    #        raise ctypes.WinError()
+    #    return bool(uof.dwFlags & WSF_VISIBLE)
+#else:
+def _is_gui_available():
+    return True
+
+def is_resource_enabled(resource):
+    """Test whether a resource is enabled.  Known resources are set by
+    regrtest.py."""
+    return use_resources is not None and resource in use_resources
+
+def requires(resource, msg=None):
+    """Raise ResourceDenied if the specified resource is not available.
+
+    If the caller's module is __main__ then automatically return True.  The
+    possibility of False being returned occurs when regrtest.py is
+    executing.
+    """
+    if resource == 'gui' and not _is_gui_available():
+        raise unittest.SkipTest("Cannot use the 'gui' resource")
+    # see if the caller's module is __main__ - if so, treat as if
+    # the resource was set
+    if sys._getframe(1).f_globals.get("__name__") == "__main__":
+        return
+    if not is_resource_enabled(resource):
+        if msg is None:
+            msg = "Use of the `%s' resource not enabled" % resource
+        raise ResourceDenied(msg)
+
+def requires_mac_ver(*min_version):
+    """Decorator raising SkipTest if the OS is Mac OS X and the OS X
+    version is less than min_version.
+
+    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
+    is less than 10.5.
+    """
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kw):
+            if sys.platform == 'darwin':
+                version_txt = platform.mac_ver()[0]
+                try:
+                    version = tuple(map(int, version_txt.split('.')))
+                except ValueError:
+                    pass
+                else:
+                    if version < min_version:
+                        min_version_txt = '.'.join(map(str, min_version))
+                        raise unittest.SkipTest(
+                            "Mac OS X %s or higher required, not %s"
+                            % (min_version_txt, version_txt))
+            return func(*args, **kw)
+        wrapper.min_version = min_version
+        return wrapper
+    return decorator
+
+HOST = 'localhost'
+
+def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
+    """Returns an unused port that should be suitable for binding.  This is
+    achieved by creating a temporary socket with the same family and type as
+    the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to
+    the specified host address (defaults to 0.0.0.0) with the port set to 0,
+    eliciting an unused ephemeral port from the OS.  The temporary socket is
+    then closed and deleted, and the ephemeral port is returned.
+
+    Either this method or bind_port() should be used for any tests where a
+    server socket needs to be bound to a particular port for the duration of
+    the test.  Which one to use depends on whether the calling code is creating
+    a python socket, or if an unused port needs to be provided in a constructor
+    or passed to an external program (i.e. the -accept argument to openssl's
+    s_server mode).  Always prefer bind_port() over find_unused_port() where
+    possible.  Hard coded ports should *NEVER* be used.  As soon as a server
+    socket is bound to a hard coded port, the ability to run multiple instances
+    of the test simultaneously on the same host is compromised, which makes the
+    test a ticking time bomb in a buildbot environment. On Unix buildbots, this
+    may simply manifest as a failed test, which can be recovered from without
+    intervention in most cases, but on Windows, the entire python process can
+    completely and utterly wedge, requiring someone to log in to the buildbot
+    and manually kill the affected process.
+
+    (This is easy to reproduce on Windows, unfortunately, and can be traced to
+    the SO_REUSEADDR socket option having different semantics on Windows versus
+    Unix/Linux.  On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
+    listen and then accept connections on identical host/ports.  An EADDRINUSE
+    socket.error will be raised at some point (depending on the platform and
+    the order bind and listen were called on each socket).
+
+    However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
+    will ever be raised when attempting to bind two identical host/ports. When
+    accept() is called on each socket, the second caller's process will steal
+    the port from the first caller, leaving them both in an awkwardly wedged
+    state where they'll no longer respond to any signals or graceful kills, and
+    must be forcibly killed via OpenProcess()/TerminateProcess().
+
+    The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option
+    instead of SO_REUSEADDR, which effectively affords the same semantics as
+    SO_REUSEADDR on Unix.  Given the propensity of Unix developers in the Open
+    Source world compared to Windows ones, this is a common mistake.  A quick
+    look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when
+    openssl.exe is called with the 's_server' option, for example. See
+    http://bugs.python.org/issue2550 for more info.  The following site also
+    has a very thorough description about the implications of both REUSEADDR
+    and EXCLUSIVEADDRUSE on Windows:
+    http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx)
+
+    XXX: although this approach is a vast improvement on previous attempts to
+    elicit unused ports, it rests heavily on the assumption that the ephemeral
+    port returned to us by the OS won't immediately be dished back out to some
+    other process when we close and delete our temporary socket but before our
+    calling code has a chance to bind the returned port.  We can deal with this
+    issue if/when we come across it.
+    """
+
+    tempsock = socket.socket(family, socktype)
+    port = bind_port(tempsock)
+    tempsock.close()
+    #del tempsock
+    return port
+
+def bind_port(sock, host=HOST):
+    """Bind the socket to a free port and return the port number.  Relies on
+    ephemeral ports in order to ensure we are using an unbound port.  This is
+    important as many tests may be running simultaneously, especially in a
+    buildbot environment.  This method raises an exception if the sock.family
+    is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
+    or SO_REUSEPORT set on it.  Tests should *never* set these socket options
+    for TCP/IP sockets.  The only case for setting these options is testing
+    multicasting via multiple UDP sockets.
+
+    Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
+    on Windows), it will be set on the socket.  This will prevent anyone else
+    from bind()'ing to our host/port for the duration of the test.
+    """
+
+    if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
+        if hasattr(socket, 'SO_REUSEADDR'):
+            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
+                raise TestFailed("tests should never set the SO_REUSEADDR "   \
+                                 "socket option on TCP/IP sockets!")
+        if hasattr(socket, 'SO_REUSEPORT'):
+            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
+                raise TestFailed("tests should never set the SO_REUSEPORT "   \
+                                 "socket option on TCP/IP sockets!")
+        if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
+
+    sock.bind((host, 0))
+    port = sock.getsockname()[1]
+    return port
+
+FUZZ = 1e-6
+
+def fcmp(x, y): # fuzzy comparison function
+    if isinstance(x, float) or isinstance(y, float):
+        try:
+            fuzz = (abs(x) + abs(y)) * FUZZ
+            if abs(x-y) <= fuzz:
+                return 0
+        except:
+            pass
+    elif type(x) == type(y) and isinstance(x, (tuple, list)):
+        for i in range(min(len(x), len(y))):
+            outcome = fcmp(x[i], y[i])
+            if outcome != 0:
+                return outcome
+        return (len(x) > len(y)) - (len(x) < len(y))
+    return (x > y) - (x < y)
+
+# decorator for skipping tests on non-IEEE 754 platforms
+requires_IEEE_754 = unittest.skipUnless(
+    cast(Any, float).__getformat__("double").startswith("IEEE"),
+    "test requires IEEE 754 doubles")
+
+is_jython = sys.platform.startswith('java')
+
+TESTFN = ''
+# Filename used for testing
+if os.name == 'java':
+    # Jython disallows @ in module names
+    TESTFN = '$test'
+else:
+    TESTFN = '@test'
+
+# Disambiguate TESTFN for parallel testing, while letting it remain a valid
+# module name.
+TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())
+
+
+# TESTFN_UNICODE is a non-ascii filename
+TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f"
+if sys.platform == 'darwin':
+    # In Mac OS X's VFS API file names are, by definition, canonically
+    # decomposed Unicode, encoded using UTF-8. See QA1173:
+    # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
+    import unicodedata
+    TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE)
+TESTFN_ENCODING = sys.getfilesystemencoding()
+
+# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be
+# encoded by the filesystem encoding (in strict mode). It can be None if we
+# cannot generate such filename.
+TESTFN_UNENCODABLE = None # type: Any
+if os.name in ('nt', 'ce'):
+    # skip win32s (0) or Windows 9x/ME (1)
+    if sys.getwindowsversion().platform >= 2:
+        # Different kinds of characters from various languages to minimize the
+        # probability that the whole name is encodable to MBCS (issue #9819)
+        TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80"
+        try:
+            TESTFN_UNENCODABLE.encode(TESTFN_ENCODING)
+        except UnicodeEncodeError:
+            pass
+        else:
+            print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). '
+                  'Unicode filename tests may not be effective'
+                  % (TESTFN_UNENCODABLE, TESTFN_ENCODING))
+            TESTFN_UNENCODABLE = None
+# Mac OS X denies unencodable filenames (invalid utf-8)
+elif sys.platform != 'darwin':
+    try:
+        # ascii and utf-8 cannot encode the byte 0xff
+        b'\xff'.decode(TESTFN_ENCODING)
+    except UnicodeDecodeError:
+        # 0xff will be encoded using the surrogate character u+DCFF
+        TESTFN_UNENCODABLE = TESTFN \
+            + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape')
+    else:
+        # File system encoding (eg. ISO-8859-* encodings) can encode
+        # the byte 0xff. Skip some unicode filename tests.
+        pass
+
+# Save the initial cwd
+SAVEDCWD = os.getcwd()
+
+ at contextlib.contextmanager
+def temp_cwd(name='tempcwd', quiet=False, path=None):
+    """
+    Context manager that temporarily changes the CWD.
+
+    An existing path may be provided as *path*, in which case this
+    function makes no changes to the file system.
+
+    Otherwise, the new CWD is created in the current directory and it's
+    named *name*. If *quiet* is False (default) and it's not possible to
+    create or change the CWD, an error is raised.  If it's True, only a
+    warning is raised and the original CWD is used.
+    """
+    saved_dir = os.getcwd()
+    is_temporary = False
+    if path is None:
+        path = name
+        try:
+            os.mkdir(name)
+            is_temporary = True
+        except OSError:
+            if not quiet:
+                raise
+            warnings.warn('tests may fail, unable to create temp CWD ' + name,
+                          RuntimeWarning, stacklevel=3)
+    try:
+        os.chdir(path)
+    except OSError:
+        if not quiet:
+            raise
+        warnings.warn('tests may fail, unable to change the CWD to ' + name,
+                      RuntimeWarning, stacklevel=3)
+    try:
+        yield os.getcwd()
+    finally:
+        os.chdir(saved_dir)
+        if is_temporary:
+            rmtree(name)
+
+
+ at contextlib.contextmanager
+def temp_umask(umask):
+    """Context manager that temporarily sets the process umask."""
+    oldmask = os.umask(umask)
+    try:
+        yield None
+    finally:
+        os.umask(oldmask)
+
+
+def findfile(file, here=__file__, subdir=None):
+    """Try to find a file on sys.path and the working directory.  If it is not
+    found the argument passed to the function is returned (this does not
+    necessarily signal failure; could still be the legitimate path)."""
+    if os.path.isabs(file):
+        return file
+    if subdir is not None:
+        file = os.path.join(subdir, file)
+    path = sys.path
+    path = [os.path.dirname(here)] + path
+    for dn in path:
+        fn = os.path.join(dn, file)
+        if os.path.exists(fn): return fn
+    return file
+
+def sortdict(dict):
+    "Like repr(dict), but in sorted order."
+    items = sorted(dict.items())
+    reprpairs = ["%r: %r" % pair for pair in items]
+    withcommas = ", ".join(reprpairs)
+    return "{%s}" % withcommas
+
+def make_bad_fd():
+    """
+    Create an invalid file descriptor by opening and closing a file and return
+    its fd.
+    """
+    file = open(TESTFN, "wb")
+    try:
+        return file.fileno()
+    finally:
+        file.close()
+        unlink(TESTFN)
+
+def check_syntax_error(testcase, statement):
+    raise NotImplementedError('no compile built-in')
+    #testcase.assertRaises(SyntaxError, compile, statement,
+    #                      '<test string>', 'exec')
+
+def open_urlresource(url, *args, **kw):
+    from urllib import request, parse
+
+    check = kw.pop('check', None)
+
+    filename = parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
+
+    fn = os.path.join(os.path.dirname(__file__), "data", filename)
+
+    def check_valid_file(fn):
+        f = open(fn, *args, **kw)
+        if check is None:
+            return f
+        elif check(f):
+            f.seek(0)
+            return f
+        f.close()
+
+    if os.path.exists(fn):
+        f = check_valid_file(fn)
+        if f is not None:
+            return f
+        unlink(fn)
+
+    # Verify the requirement before downloading the file
+    requires('urlfetch')
+
+    print('\tfetching %s ...' % url, file=get_original_stdout())
+    f = request.urlopen(url, timeout=15)
+    try:
+        with open(fn, "wb") as out:
+            s = f.read()
+            while s:
+                out.write(s)
+                s = f.read()
+    finally:
+        f.close()
+
+    f = check_valid_file(fn)
+    if f is not None:
+        return f
+    raise TestFailed('invalid resource "%s"' % fn)
+
+
+class WarningsRecorder(object):
+    """Convenience wrapper for the warnings list returned on
+       entry to the warnings.catch_warnings() context manager.
+    """
+    def __init__(self, warnings_list):
+        self._warnings = warnings_list
+        self._last = 0
+
+    def __getattr__(self, attr):
+        if len(self._warnings) > self._last:
+            return getattr(self._warnings[-1], attr)
+        elif attr in warnings.WarningMessage._WARNING_DETAILS:
+            return None
+        raise AttributeError("%r has no attribute %r" % (self, attr))
+
+    #@property
+    #def warnings(self):
+    #    return self._warnings[self._last:]
+
+    def reset(self):
+        self._last = len(self._warnings)
+
+
+def _filterwarnings(filters, quiet=False):
+    """Catch the warnings, then check if all the expected
+    warnings have been raised and re-raise unexpected warnings.
+    If 'quiet' is True, only re-raise the unexpected warnings.
+    """
+    # Clear the warning registry of the calling module
+    # in order to re-raise the warnings.
+    frame = sys._getframe(2)
+    registry = frame.f_globals.get('__warningregistry__')
+    if registry:
+        registry.clear()
+    with warnings.catch_warnings(record=True) as w:
+        # Set filter "always" to record all warnings.  Because
+        # test_warnings swap the module, we need to look up in
+        # the sys.modules dictionary.
+        sys.modules['warnings'].simplefilter("always")
+        yield WarningsRecorder(w)
+    # Filter the recorded warnings
+    reraise = list(w)
+    missing = []
+    for msg, cat in filters:
+        seen = False
+        for w in reraise[:]:
+            warning = w.message
+            # Filter out the matching messages
+            if (re.match(msg, str(warning), re.I) and
+                issubclass(warning.__class__, cat)):
+                seen = True
+                reraise.remove(w)
+        if not seen and not quiet:
+            # This filter caught nothing
+            missing.append((msg, cat.__name__))
+    if reraise:
+        raise AssertionError("unhandled warning %s" % reraise[0])
+    if missing:
+        raise AssertionError("filter (%r, %s) did not catch any warning" %
+                             missing[0])
+
+
+ at contextlib.contextmanager
+def check_warnings(*filters, **kwargs):
+    """Context manager to silence warnings.
+
+    Accept 2-tuples as positional arguments:
+        ("message regexp", WarningCategory)
+
+    Optional argument:
+     - if 'quiet' is True, it does not fail if a filter catches nothing
+        (default True without argument,
+         default False if some filters are defined)
+
+    Without argument, it defaults to:
+        check_warnings(("", Warning), quiet=True)
+    """
+    quiet = kwargs.get('quiet')
+    if not filters:
+        filters = (("", Warning),)
+        # Preserve backward compatibility
+        if quiet is None:
+            quiet = True
+    return _filterwarnings(filters, quiet)
+
+
+class CleanImport(object):
+    """Context manager to force import to return a new module reference.
+
+    This is useful for testing module-level behaviours, such as
+    the emission of a DeprecationWarning on import.
+
+    Use like this:
+
+        with CleanImport("foo"):
+            importlib.import_module("foo") # new reference
+    """
+
+    def __init__(self, *module_names):
+        self.original_modules = sys.modules.copy()
+        for module_name in module_names:
+            if module_name in sys.modules:
+                module = sys.modules[module_name]
+                # It is possible that module_name is just an alias for
+                # another module (e.g. stub for modules renamed in 3.x).
+                # In that case, we also need delete the real module to clear
+                # the import cache.
+                if module.__name__ != module_name:
+                    del sys.modules[module.__name__]
+                del sys.modules[module_name]
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        sys.modules.update(self.original_modules)
+
+
+class EnvironmentVarGuard(dict):
+
+    """Class to help protect the environment variable properly.  Can be used as
+    a context manager."""
+
+    def __init__(self):
+        self._environ = os.environ
+        self._changed = {}
+
+    def __getitem__(self, envvar):
+        return self._environ[envvar]
+
+    def __setitem__(self, envvar, value):
+        # Remember the initial value on the first access
+        if envvar not in self._changed:
+            self._changed[envvar] = self._environ.get(envvar)
+        self._environ[envvar] = value
+
+    def __delitem__(self, envvar):
+        # Remember the initial value on the first access
+        if envvar not in self._changed:
+            self._changed[envvar] = self._environ.get(envvar)
+        if envvar in self._environ:
+            del self._environ[envvar]
+
+    def keys(self):
+        return self._environ.keys()
+
+    def __iter__(self):
+        return iter(self._environ)
+
+    def __len__(self):
+        return len(self._environ)
+
+    def set(self, envvar, value):
+        self[envvar] = value
+
+    def unset(self, envvar):
+        del self[envvar]
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        for k, v in self._changed.items():
+            if v is None:
+                if k in self._environ:
+                    del self._environ[k]
+            else:
+                self._environ[k] = v
+        os.environ = self._environ
+
+
+class DirsOnSysPath(object):
+    """Context manager to temporarily add directories to sys.path.
+
+    This makes a copy of sys.path, appends any directories given
+    as positional arguments, then reverts sys.path to the copied
+    settings when the context ends.
+
+    Note that *all* sys.path modifications in the body of the
+    context manager, including replacement of the object,
+    will be reverted at the end of the block.
+    """
+
+    def __init__(self, *paths):
+        self.original_value = sys.path[:]
+        self.original_object = sys.path
+        sys.path.extend(paths)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *ignore_exc):
+        sys.path = self.original_object
+        sys.path[:] = self.original_value
+
+
+class TransientResource(object):
+
+    """Raise ResourceDenied if an exception is raised while the context manager
+    is in effect that matches the specified exception and attributes."""
+
+    def __init__(self, exc, **kwargs):
+        self.exc = exc
+        self.attrs = kwargs
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type_=None, value=None, traceback=None):
+        """If type_ is a subclass of self.exc and value has attributes matching
+        self.attrs, raise ResourceDenied.  Otherwise let the exception
+        propagate (if any)."""
+        if type_ is not None and issubclass(self.exc, type_):
+            for attr, attr_value in self.attrs.items():
+                if not hasattr(value, attr):
+                    break
+                if getattr(value, attr) != attr_value:
+                    break
+            else:
+                raise ResourceDenied("an optional resource is not available")
+
# Context managers that raise ResourceDenied when various issues
# with the Internet connection manifest themselves as exceptions.
# XXX deprecate these and use transient_internet() instead
time_out = TransientResource(IOError, errno=errno.ETIMEDOUT)  # connection timed out
socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET)  # peer reset, socket.error
ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET)  # peer reset, IOError
+
+
@contextlib.contextmanager
def transient_internet(resource_name, *, timeout=30.0, errnos=()):
    """Return a context manager that raises ResourceDenied when various issues
    with the Internet connection manifest themselves as exceptions.

    resource_name -- used in the ResourceDenied message.
    timeout -- temporary socket default timeout (None leaves it untouched).
    errnos -- explicit errnos to capture; when empty, a default set of
              connection and getaddrinfo failures is used.
    """
    # (Fix: the decorator line had been mangled to " at contextlib..." by
    # the mail archiver.)
    # Numeric values are fallbacks for platforms lacking the errno names.
    default_errnos = [
        ('ECONNREFUSED', 111),
        ('ECONNRESET', 104),
        ('EHOSTUNREACH', 113),
        ('ENETUNREACH', 101),
        ('ETIMEDOUT', 110),
    ]
    default_gai_errnos = [
        ('EAI_AGAIN', -3),
        ('EAI_FAIL', -4),
        ('EAI_NONAME', -2),
        ('EAI_NODATA', -5),
        # Encountered when trying to resolve IPv6-only hostnames
        ('WSANO_DATA', 11004),
    ]

    denied = ResourceDenied("Resource '%s' is not available" % resource_name)
    captured_errnos = errnos
    gai_errnos = []
    if not captured_errnos:
        captured_errnos = [getattr(errno, name, num)
                           for name, num in default_errnos]
        gai_errnos = [getattr(socket, name, num)
                      for name, num in default_gai_errnos]

    def filter_error(err):
        # Convert a matching network failure into ResourceDenied.
        n = getattr(err, 'errno', None)
        if (isinstance(err, socket.timeout) or
            (isinstance(err, socket.gaierror) and n in gai_errnos) or
            n in captured_errnos):
            if not verbose:
                sys.stderr.write(denied.args[0] + "\n")
            raise denied from err

    old_timeout = socket.getdefaulttimeout()
    try:
        if timeout is not None:
            socket.setdefaulttimeout(timeout)
        yield None
    except IOError as err:
        # urllib can wrap original socket errors multiple times (!), we must
        # unwrap to get at the original error.
        while True:
            a = err.args
            if len(a) >= 1 and isinstance(a[0], IOError):
                err = a[0]
            # The error can also be wrapped as args[1]:
            #    except socket.error as msg:
            #        raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
            elif len(a) >= 2 and isinstance(a[1], IOError):
                err = a[1]
            else:
                break
        filter_error(err)
        raise
    # XXX should we catch generic exceptions and look for their
    # __cause__ or __context__?
    finally:
        socket.setdefaulttimeout(old_timeout)
+
+
@contextlib.contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    (Fix: the decorator line had been mangled to " at contextlib..." by
    the mail archiver.)
    """
    import io
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, io.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)
+
def captured_stdout():
    """Context manager replacing sys.stdout with a StringIO.

    Example:
       with captured_stdout() as s:
           print("hello")
       self.assertEqual(s.getvalue(), "hello")
    """
    return captured_output("stdout")
+
def captured_stderr():
    """Context manager replacing sys.stderr with a StringIO."""
    return captured_output("stderr")
+
def captured_stdin():
    """Context manager replacing sys.stdin with a StringIO."""
    return captured_output("stdin")
+
+
def gc_collect():
    """Force as many objects as possible to be collected.

    In non-CPython implementations of Python, this is needed because timely
    deallocation is not guaranteed by the garbage collector.  (Even in CPython
    this can be the case in case of reference cycles.)  This means that __del__
    methods may be called later than expected and weakrefs may remain alive for
    longer than expected.  This function tries its best to force all garbage
    objects to disappear.
    """
    gc.collect()
    if is_jython:
        # Give Jython's collector thread a moment before retrying.
        time.sleep(0.1)
    for _ in range(2):
        gc.collect()
+
+
def python_is_optimized():
    """Return True if Python was built with compiler optimizations.

    Inspects the last -O flag in PY_CFLAGS; no flag at all, or -O0,
    counts as non-optimized.  Always returns a bool (the previous
    version could return the empty string).
    """
    cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
    final_opt = ""
    for opt in cflags.split():
        if opt.startswith('-O'):
            final_opt = opt
    return bool(final_opt and final_opt != '-O0')
+
+
+#=======================================================================
+# Decorator for running a function in a different locale, correctly resetting
+# it afterwards.
+
def run_with_locale(catstr, *locales):
    """Decorator: run the wrapped function under the first settable locale
    from *locales* for category *catstr* (e.g. 'LC_ALL'), restoring the
    original locale afterwards, even on exceptions.

    If the original locale cannot be determined, the function runs with
    the locale untouched.  An invalid category name raises AttributeError.
    """
    def decorator(func):
        # functools.wraps preserves full metadata instead of the manual
        # __name__/__doc__ copying of the previous version.
        @functools.wraps(func)
        def inner(*args, **kwds):
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except AttributeError:
                # the test author gave us an invalid category string
                raise
            except Exception:
                # cannot retrieve original locale, so do nothing
                locale = orig_locale = None
            else:
                for loc in locales:
                    try:
                        locale.setlocale(category, loc)
                        break
                    except Exception:
                        pass

            # now run the function, resetting the locale on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if locale and orig_locale:
                    locale.setlocale(category, orig_locale)
        return inner
    return decorator
+
+#=======================================================================
+# Big-memory-test support. Separate from 'resources' because memory use
+# should be configurable.
+
# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
_1M = 1024*1024
_1G = 1024 * _1M
_2G = 2 * _1G
_4G = 4 * _1G

# Largest positive size a Python object can report on this build.
MAX_Py_ssize_t = sys.maxsize
+
def set_memlimit(limit):
    """Parse a human-readable memory limit like '2.5G' or '512m' and set
    the module globals real_max_memuse (the raw request) and max_memuse
    (capped at MAX_Py_ssize_t).  Raises ValueError for unparseable or
    uselessly small limits (< 2G)."""
    global max_memuse
    global real_max_memuse
    units = {
        'k': 1024,
        'm': _1M,
        'g': _1G,
        't': 1024*_1G,
    }
    parsed = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
                      re.IGNORECASE | re.VERBOSE)
    if parsed is None:
        raise ValueError('Invalid memory limit %r' % (limit,))
    requested = int(float(parsed.group(1)) * units[parsed.group(3).lower()])
    real_max_memuse = requested
    capped = min(requested, MAX_Py_ssize_t)
    if capped < _2G - 1:
        raise ValueError('Memory limit %r too low to be useful' % (limit,))
    max_memuse = capped
+
def _memory_watchdog(start_evt, finish_evt, period=10.0):
    """A function which periodically watches the process' memory consumption
    and prints it out.

    Intended to run in a helper thread: sets *start_evt* once /proc is
    open, then polls /proc/<pid>/statm every *period* seconds until
    *finish_evt* is set.
    """
    # XXX: because of the GIL, and because the very long operations tested
    # in most bigmem tests are uninterruptible, the loop below gets woken up
    # much less often than expected.
    # The polling code should be rewritten in raw C, without holding the GIL,
    # and push results onto an anonymous pipe.
    try:
        page_size = os.sysconf('SC_PAGESIZE')
    except (ValueError, AttributeError):
        try:
            page_size = os.sysconf('SC_PAGE_SIZE')
        except (ValueError, AttributeError):
            # No sysconf (or no such name): fall back to the common 4 KiB page.
            page_size = 4096
    procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
    try:
        f = open(procfile, 'rb')
    except IOError as e:
        # Not on Linux, or /proc unmounted: give up on monitoring.
        warnings.warn('/proc not available for stats: {}'.format(e),
                      RuntimeWarning)
        sys.stderr.flush()
        return
    with f:
        start_evt.set()
        old_data = -1
        while not finish_evt.wait(period):
            f.seek(0)
            statm = f.read().decode('ascii')
            # statm field index 5 is the process data size, in pages.
            data = int(statm.split()[5])
            if data != old_data:
                old_data = data
                print(" ... process data size: {data:.1f}G"
                       .format(data=data * page_size / (1024 ** 3)))
+
def bigmemtest(size, memuse, dry_run=True):
    """Decorator for bigmem tests.

    'size' is the requested size for the test (in arbitrary,
    test-interpreted units.) 'memuse' is the number of 'bytes per size' for
    the test, or a good estimate of it.

    if 'dry_run' is False, it means the test doesn't support dummy runs
    when -M is not specified.
    """
    def decorator(f):
        def wrapper(self):
            size = wrapper.size
            memuse = wrapper.memuse
            if not real_max_memuse:
                # Dry run: use a token size that merely exercises the code.
                maxsize = 5147
            else:
                maxsize = size

            # Skip when a real run was requested (or dry runs are not
            # supported) and the configured limit is insufficient.
            if ((real_max_memuse or not dry_run)
                and real_max_memuse < maxsize * memuse):
                raise unittest.SkipTest(
                    "not enough memory: %.1fG minimum needed"
                    % (size * memuse / (1024 ** 3)))

            if real_max_memuse and verbose and threading:
                print()
                print(" ... expected peak memory use: {peak:.1f}G"
                      .format(peak=size * memuse / (1024 ** 3)))
                sys.stdout.flush()
                start_evt = threading.Event()
                finish_evt = threading.Event()
                t = threading.Thread(target=_memory_watchdog,
                                     args=(start_evt, finish_evt, 0.5))
                t.daemon = True
                t.start()
                # NOTE(review): the watchdog thread itself sets start_evt
                # once /proc is open; setting it here too looks like it was
                # meant to be start_evt.wait() -- confirm against upstream.
                start_evt.set()
            else:
                t = None

            try:
                return f(self, maxsize)
            finally:
                if t:
                    # Stop the watchdog and wait for it to exit.
                    finish_evt.set()
                    t.join()

        wrapper.size = size
        wrapper.memuse = memuse
        return wrapper
    return decorator
+
def bigaddrspacetest(f):
    """Decorator for tests that fill the address space.

    Runs the test only when max_memuse covers the whole addressable
    range; otherwise raises SkipTest with a hint.
    """
    def wrapper(self):
        if max_memuse >= MAX_Py_ssize_t:
            return f(self)
        if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
            raise unittest.SkipTest(
                "not enough memory: try a 32-bit build instead")
        raise unittest.SkipTest(
            "not enough memory: %.1fG minimum needed"
            % (MAX_Py_ssize_t / (1024 ** 3)))
    return wrapper
+
+#=======================================================================
+# unittest integration.
+
+class BasicTestRunner:
+    def run(self, test):
+        result = unittest.TestResult()
+        test(result)
+        return result
+
+def _id(obj):
+    return obj
+
def requires_resource(resource):
    """Return a decorator that skips the test unless *resource* is enabled."""
    if resource == 'gui' and not _is_gui_available():
        return unittest.skip("resource 'gui' is not available")
    if not is_resource_enabled(resource):
        return unittest.skip("resource {0!r} is not enabled".format(resource))
    return _id
+
def cpython_only(test):
    """Decorator for tests only applicable on CPython."""
    skip_unless_cpython = impl_detail(cpython=True)
    return skip_unless_cpython(test)
+
def impl_detail(msg=None, **guards):
    """Skip decorator for implementation-detail tests.

    No-op when the running interpreter matches *guards* (see
    check_impl_detail); otherwise skips with *msg* or a generated one.
    """
    if check_impl_detail(**guards):
        return _id
    if msg is None:
        guardnames, default = _parse_guards(guards)
        names = ' or '.join(sorted(guardnames.keys()))
        if default:
            msg = "implementation detail not available on {0}".format(names)
        else:
            msg = "implementation detail specific to {0}".format(names)
    return unittest.skip(msg)
+
+def _parse_guards(guards):
+    # Returns a tuple ({platform_name: run_me}, default_value)
+    if not guards:
+        return ({'cpython': True}, False)
+    is_true = list(guards.values())[0]
+    assert list(guards.values()) == [is_true] * len(guards)   # all True or all False
+    return (guards, not is_true)
+
+# Use the following check to guard CPython's implementation-specific tests --
+# or to run them only on the implementation(s) guarded by the arguments.
def check_impl_detail(**guards):
    """This function returns True or False depending on the host platform.
       Examples:
          if check_impl_detail():               # only on CPython (default)
          if check_impl_detail(jython=True):    # only on Jython
          if check_impl_detail(cpython=False):  # everywhere except on CPython
    """
    table, default = _parse_guards(guards)
    impl = platform.python_implementation().lower()
    return table.get(impl, default)
+
+
+def _filter_suite(suite, pred):
+    """Recursively filter test cases in a suite based on a predicate."""
+    newtests = []
+    for test in suite._tests:
+        if isinstance(test, unittest.TestSuite):
+            _filter_suite(test, pred)
+            newtests.append(test)
+        else:
+            if pred(test):
+                newtests.append(test)
+    suite._tests = newtests
+
+
def _run_suite(suite):
    """Run tests from a unittest.TestSuite-derived class.

    Raises TestFailed with a summary when the run was not successful.
    """
    if verbose:
        runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
                                         failfast=failfast)
    else:
        runner = BasicTestRunner()

    result = runner.run(suite)
    if result.wasSuccessful():
        return
    if len(result.errors) == 1 and not result.failures:
        err = result.errors[0][1]
    elif len(result.failures) == 1 and not result.errors:
        err = result.failures[0][1]
    else:
        err = "multiple errors occurred"
        if not verbose:
            err += "; run in verbose mode for details"
    raise TestFailed(err)
+
+
def run_unittest(*classes):
    """Run tests from unittest.TestCase-derived classes.

    Arguments may be TestCase subclasses, ready-made suites/cases, or
    names of already-imported modules to scan for tests.  Tests are
    filtered with the module-level `match_tests` pattern before running.
    """
    suite = unittest.TestSuite()
    for cls in classes:
        if isinstance(cls, str):
            if cls not in sys.modules:
                raise ValueError("str arguments must be keys in sys.modules")
            suite.addTest(unittest.findTestCases(sys.modules[cls]))
        elif isinstance(cls, (unittest.TestSuite, unittest.TestCase)):
            suite.addTest(cls)
        else:
            suite.addTest(unittest.makeSuite(cls))

    def case_pred(test):
        # Keep a test when any dotted component of its id matches.
        if match_tests is None:
            return True
        return any(fnmatch.fnmatchcase(part, match_tests)
                   for part in test.id().split("."))

    _filter_suite(suite, case_pred)
    _run_suite(suite)
+
+
+#=======================================================================
+# doctest driver.
+
def run_doctest(module, verbosity=None):
    """Run doctest on the given module.  Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """

    import doctest

    if verbosity is None:
        verbosity = verbose
    else:
        # Intentional inversion: any non-None argument means "defer to
        # doctest", and doctest treats verbose=None as "look for -v in
        # sys.argv" (see the docstring above).
        verbosity = None

    f, t = doctest.testmod(module, verbose=verbosity)
    if f:
        raise TestFailed("%d of %d doctests failed" % (f, t))
    if verbose:
        print('doctest (%s) ... %d tests with zero failures' %
              (module.__name__, t))
    return f, t
+
+
+#=======================================================================
+# Support for saving and restoring the imported modules.
+
def modules_setup():
    """Snapshot sys.modules; returns a 1-tuple for modules_cleanup()."""
    return (sys.modules.copy(),)
+
def modules_cleanup(oldmodules):
    """Restore sys.modules from the snapshot taken by modules_setup().

    Encoders/decoders are registered permanently within the internal
    codec cache. If we destroy the corresponding modules their
    globals will be set to None which will trip up the cached functions.
    So the encodings.* entries are carried over before the snapshot is
    restored.
    """
    encodings = [(name, mod) for name, mod in sys.modules.items()
                 if name.startswith('encodings.')]
    sys.modules.clear()
    sys.modules.update(encodings)
    # XXX: This kind of problem can affect more than just encodings. In
    # particular extension modules (such as _ssl) don't cope with reloading
    # properly.  Really, test modules should be cleaning out the test
    # specific modules they know they added (ala test_runpy) rather than
    # relying on this function (as test_importhooks and test_pkg do
    # currently).  Implicitly imported *real* modules should be left alone
    # (see issue 10556).
    sys.modules.update(oldmodules)
+
+#=======================================================================
+# Threading support to prevent reporting refleaks when running regrtest.py -R
+
+# NOTE: we use thread._count() rather than threading.enumerate() (or the
+# moral equivalent thereof) because a threading.Thread object is still alive
+# until its __bootstrap() method has returned, even after it has been
+# unregistered from the threading module.
+# thread._count(), on the other hand, only gets decremented *after* the
+# __bootstrap() method has returned, which gives us reliable reference counts
+# at the end of a test run.
+
+def threading_setup():
+    if _thread:
+        return _thread._count(), threading._dangling.copy()
+    else:
+        return 1, ()
+
def threading_cleanup(*original_values):
    """Wait (briefly) for thread bookkeeping to return to the baseline
    captured by threading_setup().  Gives up silently after ~1 second."""
    if not _thread:
        return
    _MAX_COUNT = 10
    for _ in range(_MAX_COUNT):
        current = _thread._count(), threading._dangling
        if current == original_values:
            break
        time.sleep(0.1)
        gc_collect()
    # XXX print a warning in case of failure?
+
def reap_threads(func):
    """Use this function when threads are being used.  This will
    ensure that the threads are cleaned up even when the test fails.
    If threading is unavailable this function does nothing.
    """
    if not _thread:
        return func

    @functools.wraps(func)
    def decorator(*args):
        baseline = threading_setup()
        try:
            return func(*args)
        finally:
            threading_cleanup(*baseline)
    return decorator
+
def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started.  This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """
    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    if not hasattr(os, 'waitpid'):
        return
    any_process = -1
    while True:
        try:
            # This will raise an exception on Windows.  That's ok.
            pid, status = os.waitpid(any_process, os.WNOHANG)
        except:
            break
        if pid == 0:
            # No more dead children.
            break
+
@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
    """Temporary swap out an attribute with a new object.

    Usage:
        with swap_attr(obj, "attr", 5):
            ...

        This will set obj.attr to 5 for the duration of the with: block,
        restoring the old value at the end of the block. If `attr` doesn't
        exist on `obj`, it will be created and then deleted at the end of the
        block.

    (Fix: the decorator line had been mangled to " at contextlib..." by
    the mail archiver.)
    """
    if hasattr(obj, attr):
        real_val = getattr(obj, attr)
        setattr(obj, attr, new_val)
        try:
            yield None
        finally:
            setattr(obj, attr, real_val)
    else:
        setattr(obj, attr, new_val)
        try:
            yield None
        finally:
            delattr(obj, attr)
+
@contextlib.contextmanager
def swap_item(obj, item, new_val):
    """Temporary swap out an item with a new object.

    Usage:
        with swap_item(obj, "item", 5):
            ...

        This will set obj["item"] to 5 for the duration of the with: block,
        restoring the old value at the end of the block. If `item` doesn't
        exist on `obj`, it will be created and then deleted at the end of the
        block.

    (Fix: the decorator line had been mangled to " at contextlib..." by
    the mail archiver.)
    """
    if item in obj:
        real_val = obj[item]
        obj[item] = new_val
        try:
            yield None
        finally:
            obj[item] = real_val
    else:
        obj[item] = new_val
        try:
            yield None
        finally:
            del obj[item]
+
def strip_python_stderr(stderr):
    """Strip the stderr of a Python process from potential debug output
    emitted by the interpreter.

    This will typically be run on the result of the communicate() method
    of a subprocess.Popen object.
    """
    # Drop a trailing "[N refs]" line (debug builds) and outer whitespace.
    return re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip()
+
def args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    settings in sys.flags."""
    flag_opt_map = {
        'bytes_warning': 'b',
        'dont_write_bytecode': 'B',
        'hash_randomization': 'R',
        'ignore_environment': 'E',
        'no_user_site': 's',
        'no_site': 'S',
        'optimize': 'O',
        'verbose': 'v',
    }
    # Repeat the option letter once per flag level (e.g. -OO, -vv).
    return ['-' + opt * getattr(sys.flags, flag)
            for flag, opt in flag_opt_map.items()
            if getattr(sys.flags, flag) > 0]
+
+#============================================================
+# Support for assertions about logging.
+#============================================================
+
class TestHandler(logging.handlers.BufferingHandler):
    """BufferingHandler that records every record's __dict__ and can be
    queried through a Matcher-style object."""

    def __init__(self, matcher):
        # BufferingHandler takes a "capacity" argument; since shouldFlush
        # is overridden to never flush, capacity 0 is fine.  The buffer
        # only empties via an explicit flush() call.
        logging.handlers.BufferingHandler.__init__(self, 0)
        self.matcher = matcher

    def shouldFlush(self, record):
        return False

    def emit(self, record):
        self.format(record)
        self.buffer.append(record.__dict__)

    def matches(self, **kwargs):
        """
        Look for a saved dict whose keys/values match the supplied arguments.
        """
        return any(self.matcher.matches(stored, **kwargs)
                   for stored in self.buffer)
+
class Matcher(object):
    """Match stored logging-record dicts against keyword criteria."""

    # Keys whose string values are compared by substring, not equality.
    _partial_matches = ('msg', 'message')

    def matches(self, d, **kwargs):
        """
        Try to match a single dict with the supplied arguments.

        Keys whose values are strings and which are in self._partial_matches
        will be checked for partial (i.e. substring) matches. You can extend
        this scheme to (for example) do regular expression matching, etc.
        """
        return all(self.match_value(key, d.get(key), wanted)
                   for key, wanted in kwargs.items())

    def match_value(self, k, dv, v):
        """
        Try to match a single stored value (dv) with a supplied value (v).
        """
        if type(v) != type(dv):
            return False
        if type(dv) is not str or k not in self._partial_matches:
            return v == dv
        return v in dv
+
+
_can_symlink = None # type: Any
def can_symlink():
    """Return (and cache in _can_symlink) whether this platform supports
    creating symlinks via os.symlink()."""
    global _can_symlink
    if _can_symlink is None:
        probe = TESTFN + "can_symlink"
        try:
            os.symlink(TESTFN, probe)
        except (OSError, NotImplementedError, AttributeError):
            _can_symlink = False
        else:
            os.remove(probe)
            _can_symlink = True
    return _can_symlink
+
def skip_unless_symlink(test):
    """Skip decorator for tests that require functional symlink"""
    if can_symlink():
        return test
    return unittest.skip("Requires functional symlink implementation")(test)
+
def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.

    """
    # Raises AttributeError if the attribute does not exist at all.
    getattr(object_to_patch, attr_name)

    # Remember whether the attribute lived directly in the object's own
    # __dict__ (vs. being inherited/computed), and what its old value was.
    try:
        old_value = object_to_patch.__dict__[attr_name]
    except (AttributeError, KeyError):
        attr_is_local = False
        old_value = getattr(object_to_patch, attr_name, None)
    else:
        attr_is_local = True

    def cleanup():
        # Restore a locally-set attribute; remove one we created/shadowed.
        if attr_is_local:
            setattr(object_to_patch, attr_name, old_value)
        else:
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(cleanup)

    # Actually override the attribute.
    setattr(object_to_patch, attr_name, new_value)
diff --git a/test-data/stdlib-samples/3.2/test/test_base64.py b/test-data/stdlib-samples/3.2/test/test_base64.py
new file mode 100644
index 0000000..9e4dcf5
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_base64.py
@@ -0,0 +1,267 @@
+import unittest
+from test import support
+import base64
+import binascii
+import sys
+import subprocess
+
+from typing import Any
+
+
+
class LegacyBase64TestCase(unittest.TestCase):
    """Tests for the legacy bytes API: encodebytes/decodebytes plus the
    file-object based encode()/decode() helpers."""

    # Shared fixture: a payload exercising the full base64 alphabet, and
    # its MIME-style encoding (line-wrapped, with a trailing newline).
    _LONG_RAW = (b"abcdefghijklmnopqrstuvwxyz"
                 b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                 b"0123456789!@#0^&*();:<>,. []{}")
    _LONG_ENCODED = (b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
                     b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
                     b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")

    def test_encodebytes(self) -> None:
        cases = [
            (b"www.python.org", b"d3d3LnB5dGhvbi5vcmc=\n"),
            (b"a", b"YQ==\n"),
            (b"ab", b"YWI=\n"),
            (b"abc", b"YWJj\n"),
            (b"", b""),
            (self._LONG_RAW, self._LONG_ENCODED),
        ]
        for raw, encoded in cases:
            self.assertEqual(base64.encodebytes(raw), encoded)
        # str input is rejected: the legacy API is bytes-only.
        self.assertRaises(TypeError, base64.encodebytes, "")

    def test_decodebytes(self) -> None:
        cases = [
            (b"d3d3LnB5dGhvbi5vcmc=\n", b"www.python.org"),
            (b"YQ==\n", b"a"),
            (b"YWI=\n", b"ab"),
            (b"YWJj\n", b"abc"),
            (self._LONG_ENCODED, self._LONG_RAW),
            (b'', b''),
        ]
        for encoded, raw in cases:
            self.assertEqual(base64.decodebytes(encoded), raw)
        # str input is rejected: the legacy API is bytes-only.
        self.assertRaises(TypeError, base64.decodebytes, "")

    def test_encode(self) -> None:
        # encode() reads a binary file object and writes wrapped base64.
        from io import BytesIO
        source = BytesIO(self._LONG_RAW)
        sink = BytesIO()
        base64.encode(source, sink)
        self.assertEqual(sink.getvalue(), self._LONG_ENCODED)

    def test_decode(self) -> None:
        # decode() is the file-object inverse of encode().
        from io import BytesIO
        source = BytesIO(b'd3d3LnB5dGhvbi5vcmc=')
        sink = BytesIO()
        base64.decode(source, sink)
        self.assertEqual(sink.getvalue(), b'www.python.org')
+
+
class BaseXYTestCase(unittest.TestCase):
    """Tests for the modern bXY API (b64/b32/b16 encode and decode).

    NOTE(review): this is vendored Python 3.2-era test data. Several
    TypeError expectations for str inputs to the *decode* functions
    presumably no longer hold on later CPython versions (which accept
    ASCII-only str there) — do not expect this sample to pass unchanged
    under a current interpreter; verify against the targeted version.
    """

    def test_b64encode(self) -> None:
        eq = self.assertEqual
        # Test default alphabet
        eq(base64.b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
        eq(base64.b64encode(b'\x00'), b'AA==')
        eq(base64.b64encode(b"a"), b"YQ==")
        eq(base64.b64encode(b"ab"), b"YWI=")
        eq(base64.b64encode(b"abc"), b"YWJj")
        eq(base64.b64encode(b""), b"")
        eq(base64.b64encode(b"abcdefghijklmnopqrstuvwxyz"
                            b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                            b"0123456789!@#0^&*();:<>,. []{}"),
           b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
           b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
           b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
        # Test with arbitrary alternative characters
        eq(base64.b64encode(b'\xd3V\xbeo\xf7\x1d', altchars=b'*$'), b'01a*b$cd')
        # Check if passing a str object raises an error
        self.assertRaises(TypeError, base64.b64encode, "")
        self.assertRaises(TypeError, base64.b64encode, b"", altchars="")
        # Test standard alphabet
        eq(base64.standard_b64encode(b"www.python.org"), b"d3d3LnB5dGhvbi5vcmc=")
        eq(base64.standard_b64encode(b"a"), b"YQ==")
        eq(base64.standard_b64encode(b"ab"), b"YWI=")
        eq(base64.standard_b64encode(b"abc"), b"YWJj")
        eq(base64.standard_b64encode(b""), b"")
        eq(base64.standard_b64encode(b"abcdefghijklmnopqrstuvwxyz"
                                     b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                                     b"0123456789!@#0^&*();:<>,. []{}"),
           b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
           b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
           b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
        # Check if passing a str object raises an error
        # NOTE(review): standard_b64encode takes no 'altchars' parameter;
        # the second call below passes only because the unexpected keyword
        # argument itself raises TypeError — confirm intent vs. upstream.
        self.assertRaises(TypeError, base64.standard_b64encode, "")
        self.assertRaises(TypeError, base64.standard_b64encode, b"", altchars="")
        # Test with 'URL safe' alternative characters
        eq(base64.urlsafe_b64encode(b'\xd3V\xbeo\xf7\x1d'), b'01a-b_cd')
        # Check if passing a str object raises an error
        self.assertRaises(TypeError, base64.urlsafe_b64encode, "")

    def test_b64decode(self) -> None:
        eq = self.assertEqual
        eq(base64.b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
        eq(base64.b64decode(b'AA=='), b'\x00')
        eq(base64.b64decode(b"YQ=="), b"a")
        eq(base64.b64decode(b"YWI="), b"ab")
        eq(base64.b64decode(b"YWJj"), b"abc")
        # Embedded newlines in the input are ignored by the decoder.
        eq(base64.b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
                            b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
                            b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
           b"abcdefghijklmnopqrstuvwxyz"
           b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
           b"0123456789!@#0^&*();:<>,. []{}")
        eq(base64.b64decode(b''), b'')
        # Test with arbitrary alternative characters
        eq(base64.b64decode(b'01a*b$cd', altchars=b'*$'), b'\xd3V\xbeo\xf7\x1d')
        # Check if passing a str object raises an error (3.2 behavior;
        # see the class NOTE above).
        self.assertRaises(TypeError, base64.b64decode, "")
        self.assertRaises(TypeError, base64.b64decode, b"", altchars="")
        # Test standard alphabet
        eq(base64.standard_b64decode(b"d3d3LnB5dGhvbi5vcmc="), b"www.python.org")
        eq(base64.standard_b64decode(b"YQ=="), b"a")
        eq(base64.standard_b64decode(b"YWI="), b"ab")
        eq(base64.standard_b64decode(b"YWJj"), b"abc")
        eq(base64.standard_b64decode(b""), b"")
        eq(base64.standard_b64decode(b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
                                     b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
                                     b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
           b"abcdefghijklmnopqrstuvwxyz"
           b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
           b"0123456789!@#0^&*();:<>,. []{}")
        # Check if passing a str object raises an error
        self.assertRaises(TypeError, base64.standard_b64decode, "")
        self.assertRaises(TypeError, base64.standard_b64decode, b"", altchars="")
        # Test with 'URL safe' alternative characters
        eq(base64.urlsafe_b64decode(b'01a-b_cd'), b'\xd3V\xbeo\xf7\x1d')
        self.assertRaises(TypeError, base64.urlsafe_b64decode, "")

    def test_b64decode_padding_error(self) -> None:
        # Truncated input (bad padding) raises binascii.Error.
        self.assertRaises(binascii.Error, base64.b64decode, b'abc')

    def test_b64decode_invalid_chars(self) -> None:
        # issue 1466065: Test some invalid characters.
        # Without validate=True, non-alphabet bytes are silently dropped;
        # with validate=True they raise binascii.Error.
        tests = ((b'%3d==', b'\xdd'),
                 (b'$3d==', b'\xdd'),
                 (b'[==', b''),
                 (b'YW]3=', b'am'),
                 (b'3{d==', b'\xdd'),
                 (b'3d}==', b'\xdd'),
                 (b'@@', b''),
                 (b'!', b''),
                 (b'YWJj\nYWI=', b'abcab'))
        for bstr, res in tests:
            self.assertEqual(base64.b64decode(bstr), res)
            with self.assertRaises(binascii.Error):
                base64.b64decode(bstr, validate=True)

    def test_b32encode(self) -> None:
        eq = self.assertEqual
        eq(base64.b32encode(b''), b'')
        eq(base64.b32encode(b'\x00'), b'AA======')
        eq(base64.b32encode(b'a'), b'ME======')
        eq(base64.b32encode(b'ab'), b'MFRA====')
        eq(base64.b32encode(b'abc'), b'MFRGG===')
        eq(base64.b32encode(b'abcd'), b'MFRGGZA=')
        eq(base64.b32encode(b'abcde'), b'MFRGGZDF')
        # str input is rejected by the encoders.
        self.assertRaises(TypeError, base64.b32encode, "")

    def test_b32decode(self) -> None:
        eq = self.assertEqual
        eq(base64.b32decode(b''), b'')
        eq(base64.b32decode(b'AA======'), b'\x00')
        eq(base64.b32decode(b'ME======'), b'a')
        eq(base64.b32decode(b'MFRA===='), b'ab')
        eq(base64.b32decode(b'MFRGG==='), b'abc')
        eq(base64.b32decode(b'MFRGGZA='), b'abcd')
        eq(base64.b32decode(b'MFRGGZDF'), b'abcde')
        # 3.2-era expectation; see the class NOTE above.
        self.assertRaises(TypeError, base64.b32decode, "")

    def test_b32decode_casefold(self) -> None:
        eq = self.assertEqual
        # casefold=True accepts lower-case input.
        eq(base64.b32decode(b'', True), b'')
        eq(base64.b32decode(b'ME======', True), b'a')
        eq(base64.b32decode(b'MFRA====', True), b'ab')
        eq(base64.b32decode(b'MFRGG===', True), b'abc')
        eq(base64.b32decode(b'MFRGGZA=', True), b'abcd')
        eq(base64.b32decode(b'MFRGGZDF', True), b'abcde')
        # Lower cases
        eq(base64.b32decode(b'me======', True), b'a')
        eq(base64.b32decode(b'mfra====', True), b'ab')
        eq(base64.b32decode(b'mfrgg===', True), b'abc')
        eq(base64.b32decode(b'mfrggza=', True), b'abcd')
        eq(base64.b32decode(b'mfrggzdf', True), b'abcde')
        # Expected exceptions
        # NOTE(review): lower-case input without casefold is expected to
        # raise TypeError here (3.2 behavior); later versions presumably
        # raise binascii.Error instead — confirm against the target.
        self.assertRaises(TypeError, base64.b32decode, b'me======')
        # Mapping zero and one
        eq(base64.b32decode(b'MLO23456'), b'b\xdd\xad\xf3\xbe')
        eq(base64.b32decode(b'M1023456', map01=b'L'), b'b\xdd\xad\xf3\xbe')
        eq(base64.b32decode(b'M1023456', map01=b'I'), b'b\x1d\xad\xf3\xbe')
        self.assertRaises(TypeError, base64.b32decode, b"", map01="")

    def test_b32decode_error(self) -> None:
        # Malformed length/padding raises binascii.Error.
        self.assertRaises(binascii.Error, base64.b32decode, b'abc')
        self.assertRaises(binascii.Error, base64.b32decode, b'ABCDEF==')

    def test_b16encode(self) -> None:
        eq = self.assertEqual
        eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
        eq(base64.b16encode(b'\x00'), b'00')
        self.assertRaises(TypeError, base64.b16encode, "")

    def test_b16decode(self) -> None:
        eq = self.assertEqual
        eq(base64.b16decode(b'0102ABCDEF'), b'\x01\x02\xab\xcd\xef')
        eq(base64.b16decode(b'00'), b'\x00')
        # Lower case is not allowed without a flag
        self.assertRaises(binascii.Error, base64.b16decode, b'0102abcdef')
        # Case fold
        eq(base64.b16decode(b'0102abcdef', True), b'\x01\x02\xab\xcd\xef')
        self.assertRaises(TypeError, base64.b16decode, "")

    def test_ErrorHeritage(self) -> None:
        # binascii.Error must remain a ValueError subclass so callers can
        # catch either.
        self.assertTrue(issubclass(binascii.Error, ValueError))
+
+
+
+class TestMain(unittest.TestCase):
+    def get_output(self, *args_tuple: str, **options: Any) -> Any:
+        args = [sys.executable, '-m', 'base64'] + list(args_tuple)
+        return subprocess.check_output(args, **options)
+
+    def test_encode_decode(self) -> None:
+        output = self.get_output('-t')
+        self.assertSequenceEqual(output.splitlines(), [
+            b"b'Aladdin:open sesame'",
+            br"b'QWxhZGRpbjpvcGVuIHNlc2FtZQ==\n'",
+            b"b'Aladdin:open sesame'",
+        ])
+
+    def test_encode_file(self) -> None:
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(b'a\xffb\n')
+
+        output = self.get_output('-e', support.TESTFN)
+        self.assertEqual(output.rstrip(), b'Yf9iCg==')
+
+        with open(support.TESTFN, 'rb') as fp:
+            output = self.get_output('-e', stdin=fp)
+        self.assertEqual(output.rstrip(), b'Yf9iCg==')
+
+    def test_decode(self) -> None:
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(b'Yf9iCg==')
+        output = self.get_output('-d', support.TESTFN)
+        self.assertEqual(output.rstrip(), b'a\xffb')
+
+
+
def test_main() -> None:
    """Run every TestCase defined in this module via test.support."""
    support.run_unittest(__name__)

if __name__ == '__main__':
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_fnmatch.py b/test-data/stdlib-samples/3.2/test/test_fnmatch.py
new file mode 100644
index 0000000..0f5a23b
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_fnmatch.py
@@ -0,0 +1,93 @@
+"""Test cases for the fnmatch module."""
+
+from test import support
+import unittest
+
+from fnmatch import fnmatch, fnmatchcase, translate, filter
+
+from typing import Any, AnyStr, Callable
+
class FnmatchTestCase(unittest.TestCase):
    """Behavioral tests for fnmatch() and fnmatchcase()."""

    def check_match(self, filename: AnyStr, pattern: AnyStr,
                    should_match: int = 1,
                    fn: Any = fnmatch) -> None:  # see #270
        # Assert that *fn* agrees with the expectation, with a readable
        # failure message either way.
        matched = fn(filename, pattern)
        if should_match:
            self.assertTrue(matched,
                         "expected %r to match pattern %r"
                         % (filename, pattern))
        else:
            self.assertTrue(not matched,
                         "expected %r not to match pattern %r"
                         % (filename, pattern))

    def test_fnmatch(self) -> None:
        matching = [
            ('abc', 'abc'), ('abc', '?*?'), ('abc', '???*'),
            ('abc', '*???'), ('abc', '???'), ('abc', '*'),
            ('abc', 'ab[cd]'), ('abc', 'ab[!de]'),
            # '\' handling inside character sets; see SF bug #409651
            ('\\', r'[\]'), ('a', r'[!\]'),
            # filenames with embedded newlines;
            # http://bugs.python.org/issue6665
            ('foo\nbar', 'foo*'), ('foo\nbar\n', 'foo*'), ('\n', '*'),
        ]
        non_matching = [
            ('abc', 'ab[de]'), ('a', '??'), ('a', 'b'),
            ('\\', r'[!\]'), ('\nfoo', 'foo*'),
        ]
        for name, pat in matching:
            self.check_match(name, pat)
        for name, pat in non_matching:
            self.check_match(name, pat, 0)

    def test_mix_bytes_str(self) -> None:
        # Mixing str and bytes arguments must raise TypeError.
        for matcher in (fnmatch, fnmatchcase):
            self.assertRaises(TypeError, matcher, 'test', b'*')
            self.assertRaises(TypeError, matcher, b'test', '*')

    def test_fnmatchcase(self) -> None:
        # Case differences are significant for fnmatchcase.
        self.check_match('AbC', 'abc', 0, fnmatchcase)
        self.check_match('abc', 'AbC', 0, fnmatchcase)

    def test_bytes(self) -> None:
        for name, pat in ((b'test', b'te*'),
                          (b'test\xff', b'te*\xff'),
                          (b'foo\nbar', b'foo*')):
            self.check_match(name, pat)
+
class TranslateTestCase(unittest.TestCase):
    """Tests for fnmatch.translate (shell pattern -> regex source).

    NOTE(review): the expected strings are the Python 3.2-era output
    format ('...\\Z(?ms)'); translate()'s output format changed in later
    CPython versions, so this vendored sample is version-pinned.
    """

    def test_translate(self) -> None:
        self.assertEqual(translate('*'), '.*\Z(?ms)')
        self.assertEqual(translate('?'), '.\Z(?ms)')
        self.assertEqual(translate('a?b*'), 'a.b.*\Z(?ms)')
        self.assertEqual(translate('[abc]'), '[abc]\Z(?ms)')
        self.assertEqual(translate('[]]'), '[]]\Z(?ms)')
        # '!' negates a set; a lone '^' is escaped, not a negation.
        self.assertEqual(translate('[!x]'), '[^x]\Z(?ms)')
        self.assertEqual(translate('[^x]'), '[\\^x]\Z(?ms)')
        # An unterminated '[' is treated as a literal character.
        self.assertEqual(translate('[x'), '\\[x\Z(?ms)')
+
+
class FilterTestCase(unittest.TestCase):
    """Tests for fnmatch.filter."""

    def test_filter(self) -> None:
        # Only the names matching the pattern survive.
        result = filter(['a', 'b'], 'a')
        self.assertEqual(result, ['a'])
+
+
def test_main() -> None:
    """Run all of this module's test cases via test.support."""
    cases = (FnmatchTestCase,
             TranslateTestCase,
             FilterTestCase)
    support.run_unittest(*cases)


if __name__ == "__main__":
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_genericpath.py b/test-data/stdlib-samples/3.2/test/test_genericpath.py
new file mode 100644
index 0000000..43b78e7
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_genericpath.py
@@ -0,0 +1,313 @@
+"""
+Tests common to genericpath, macpath, ntpath and posixpath
+"""
+
+import unittest
+from test import support
+import os
+
+import genericpath
+import imp
+imp.reload(genericpath) # Make sure we are using the local copy
+
+import sys
+from typing import Any, List
+
+
def safe_rmdir(dirname: str) -> None:
    """Best-effort directory removal: ignore OSError (e.g. missing or
    non-empty directory) so test cleanup never fails."""
    try:
        os.rmdir(dirname)
    except OSError:
        return
+
+
class GenericTest(unittest.TestCase):
    """Checks shared by all path modules.

    ``pathmodule`` names the module under test (genericpath here);
    subclasses substitute macpath/ntpath/posixpath.
    """

    # The path module to be tested
    pathmodule = genericpath # type: Any
    # Functions every path module is expected to export.
    common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime',
                         'getmtime', 'exists', 'isdir', 'isfile']
    # Module-specific extras; filled in by subclasses.
    attributes = []  # type: List[str]

    def test_no_argument(self) -> None:
        # Every exported function must reject a zero-argument call.
        for attr in self.common_attributes + self.attributes:
            with self.assertRaises(TypeError):
                getattr(self.pathmodule, attr)()
                # NOTE(review): only reached when the call above did NOT
                # raise; the AssertionError then escapes the with-block.
                self.fail("{}.{}() did not raise a TypeError"
                          .format(self.pathmodule.__name__, attr))

    def test_commonprefix(self) -> None:
        commonprefix = self.pathmodule.commonprefix
        # str inputs: commonprefix is a purely character-wise operation,
        # so both '/'-separated and ':'-separated paths are exercised.
        self.assertEqual(
            commonprefix([]),
            ""
        )
        self.assertEqual(
            commonprefix(["/home/swenson/spam", "/home/swen/spam"]),
            "/home/swen"
        )
        self.assertEqual(
            commonprefix(["/home/swen/spam", "/home/swen/eggs"]),
            "/home/swen/"
        )
        self.assertEqual(
            commonprefix(["/home/swen/spam", "/home/swen/spam"]),
            "/home/swen/spam"
        )
        self.assertEqual(
            commonprefix(["home:swenson:spam", "home:swen:spam"]),
            "home:swen"
        )
        self.assertEqual(
            commonprefix([":home:swen:spam", ":home:swen:eggs"]),
            ":home:swen:"
        )
        self.assertEqual(
            commonprefix([":home:swen:spam", ":home:swen:spam"]),
            ":home:swen:spam"
        )

        # Same expectations with bytes inputs.
        self.assertEqual(
            commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]),
            b"/home/swen"
        )
        self.assertEqual(
            commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]),
            b"/home/swen/"
        )
        self.assertEqual(
            commonprefix([b"/home/swen/spam", b"/home/swen/spam"]),
            b"/home/swen/spam"
        )
        self.assertEqual(
            commonprefix([b"home:swenson:spam", b"home:swen:spam"]),
            b"home:swen"
        )
        self.assertEqual(
            commonprefix([b":home:swen:spam", b":home:swen:eggs"]),
            b":home:swen:"
        )
        self.assertEqual(
            commonprefix([b":home:swen:spam", b":home:swen:spam"]),
            b":home:swen:spam"
        )

        # Exhaustive pairwise check: the result is a prefix of both inputs
        # and is maximal (next characters differ when the inputs differ).
        testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd',
                    'aXc', 'abd', 'ab', 'aX', 'abcX']
        for s1 in testlist:
            for s2 in testlist:
                p = commonprefix([s1, s2])
                self.assertTrue(s1.startswith(p))
                self.assertTrue(s2.startswith(p))
                if s1 != s2:
                    n = len(p)
                    self.assertNotEqual(s1[n:n+1], s2[n:n+1])

    def test_getsize(self) -> None:
        # getsize() reports the byte length of the file's contents.
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_time(self) -> None:
        # After a write followed by an append, the change time cannot
        # exceed the modification time.
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            f = open(support.TESTFN, "ab")
            f.write(b"bar")
            f.close()
            f = open(support.TESTFN, "rb")
            d = f.read()
            f.close()
            self.assertEqual(d, b"foobar")

            self.assertLessEqual(
                self.pathmodule.getctime(support.TESTFN),
                self.pathmodule.getmtime(support.TESTFN)
            )
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_exists(self) -> None:
        # exists() flips from False to True once the file is created;
        # lexists() exists only in the concrete path modules, not in
        # genericpath itself.
        self.assertIs(self.pathmodule.exists(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.exists(support.TESTFN), True)
            if not self.pathmodule == genericpath:
                self.assertIs(self.pathmodule.lexists(support.TESTFN),
                              True)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)

    def test_isdir(self) -> None:
        # isdir() must be False for a regular file and True once the same
        # name is recreated as a directory.
        self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
            os.remove(support.TESTFN)
            os.mkdir(support.TESTFN)
            self.assertIs(self.pathmodule.isdir(support.TESTFN), True)
            os.rmdir(support.TESTFN)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
            safe_rmdir(support.TESTFN)

    def test_isfile(self) -> None:
        # Mirror image of test_isdir for isfile().
        self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(self.pathmodule.isfile(support.TESTFN), True)
            os.remove(support.TESTFN)
            os.mkdir(support.TESTFN)
            self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
            os.rmdir(support.TESTFN)
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
            safe_rmdir(support.TESTFN)
+
+
+# Following TestCase is not supposed to be run from test_genericpath.
+# It is inherited by other test modules (macpath, ntpath, posixpath).
+
class CommonTest(GenericTest):
    """Extra checks shared by the real path modules.

    Not run from test_genericpath itself; inherited by the macpath,
    ntpath and posixpath test modules, which set ``pathmodule``.
    """

    # The path module to be tested
    pathmodule = None # type: Any
    # NOTE(review): 'normpath' appears twice in this list — harmless
    # (the attribute is simply checked twice) but redundant upstream.
    common_attributes = GenericTest.common_attributes + [
        # Properties
        'curdir', 'pardir', 'extsep', 'sep',
        'pathsep', 'defpath', 'altsep', 'devnull',
        # Methods
        'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath',
        'join', 'split', 'splitext', 'isabs', 'basename', 'dirname',
        'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath',
    ]

    def test_normcase(self) -> None:
        normcase = self.pathmodule.normcase
        # check that normcase() is idempotent
        for p in ["FoO/./BaR", b"FoO/./BaR"]:
            p = normcase(p)
            self.assertEqual(p, normcase(p))

        self.assertEqual(normcase(''), '')
        self.assertEqual(normcase(b''), b'')

        # check that normcase raises a TypeError for invalid types
        for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}):
            self.assertRaises(TypeError, normcase, path)

    def test_splitdrive(self) -> None:
        # splitdrive for non-NT paths: the drive part is always empty,
        # for both str and bytes inputs.
        splitdrive = self.pathmodule.splitdrive
        self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar"))
        self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar"))
        self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar"))

        self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar"))
        self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar"))
        self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar"))

    def test_expandvars(self) -> None:
        if self.pathmodule.__name__ == 'macpath':
            self.skipTest('macpath.expandvars is a stub')
        expandvars = self.pathmodule.expandvars
        # Run against a controlled environment: $foo is set, plus two
        # pathological names exercising brace handling.
        with support.EnvironmentVarGuard() as env:
            env.clear()
            env["foo"] = "bar"
            env["{foo"] = "baz1"
            env["{foo}"] = "baz2"
            self.assertEqual(expandvars("foo"), "foo")
            self.assertEqual(expandvars("$foo bar"), "bar bar")
            self.assertEqual(expandvars("${foo}bar"), "barbar")
            self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar")
            self.assertEqual(expandvars("$bar bar"), "$bar bar")
            self.assertEqual(expandvars("$?bar"), "$?bar")
            self.assertEqual(expandvars("${foo}bar"), "barbar")
            self.assertEqual(expandvars("$foo}bar"), "bar}bar")
            self.assertEqual(expandvars("${foo"), "${foo")
            self.assertEqual(expandvars("${{foo}}"), "baz1}")
            self.assertEqual(expandvars("$foo$foo"), "barbar")
            self.assertEqual(expandvars("$bar$bar"), "$bar$bar")

            # Same expectations with bytes paths.
            self.assertEqual(expandvars(b"foo"), b"foo")
            self.assertEqual(expandvars(b"$foo bar"), b"bar bar")
            self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
            self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar")
            self.assertEqual(expandvars(b"$bar bar"), b"$bar bar")
            self.assertEqual(expandvars(b"$?bar"), b"$?bar")
            self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
            self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar")
            self.assertEqual(expandvars(b"${foo"), b"${foo")
            self.assertEqual(expandvars(b"${{foo}}"), b"baz1}")
            self.assertEqual(expandvars(b"$foo$foo"), b"barbar")
            self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar")

    def test_abspath(self) -> None:
        self.assertIn("foo", self.pathmodule.abspath("foo"))
        self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))

        # Abspath returns bytes when the arg is bytes
        for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
            self.assertIsInstance(self.pathmodule.abspath(path), bytes)

    def test_realpath(self) -> None:
        # Minimal smoke test: the input name survives resolution.
        self.assertIn("foo", self.pathmodule.realpath("foo"))
        self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))

    def test_normpath_issue5827(self) -> None:
        # Make sure normpath preserves unicode
        for path in ('', '.', '/', '\\', '///foo/.//bar//'):
            self.assertIsInstance(self.pathmodule.normpath(path), str)

    def test_abspath_issue3426(self) -> None:
        # Check that abspath returns unicode when the arg is unicode
        # with both ASCII and non-ASCII cwds.
        abspath = self.pathmodule.abspath
        for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
            self.assertIsInstance(abspath(path), str)

        unicwd = '\xe7w\xf0'
        try:
            fsencoding = support.TESTFN_ENCODING or "ascii"
            unicwd.encode(fsencoding)
        except (AttributeError, UnicodeEncodeError):
            # FS encoding is probably ASCII
            pass
        else:
            with support.temp_cwd(unicwd):
                for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
                    self.assertIsInstance(abspath(path), str)

    @unittest.skipIf(sys.platform == 'darwin',
        "Mac OS X denies the creation of a directory with an invalid utf8 name")
    def test_nonascii_abspath(self) -> None:
        # Test non-ASCII, non-UTF8 bytes in the path.
        with support.temp_cwd(b'\xe7w\xf0'):
            self.test_abspath()
+
+
def test_main() -> None:
    """Run GenericTest via test.support.

    CommonTest is deliberately not run here; it is executed by the
    macpath/ntpath/posixpath test modules that inherit it.
    """
    support.run_unittest(GenericTest)


if __name__=="__main__":
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_getopt.py b/test-data/stdlib-samples/3.2/test/test_getopt.py
new file mode 100644
index 0000000..3320552
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_getopt.py
@@ -0,0 +1,190 @@
+# test_getopt.py
+# David Goodger <dgoodger at bigfoot.com> 2000-08-19
+
+from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard
+import unittest
+
+import getopt
+
+from typing import cast, Any
+
+sentinel = object()
+
class GetoptTests(unittest.TestCase):
    """Tests for getopt's public API (getopt, gnu_getopt) and its
    internal helpers (short_has_arg, long_has_args, do_shorts, do_longs).
    """

    def setUp(self) -> None:
        # Run with POSIXLY_CORRECT unset so GNU-style scanning is the
        # default; the guard records changes for restoration.
        self.env = EnvironmentVarGuard()
        if "POSIXLY_CORRECT" in self.env:
            del self.env["POSIXLY_CORRECT"]

    def tearDown(self) -> None:
        # EnvironmentVarGuard.__exit__ restores any modified variables.
        self.env.__exit__()
        del self.env

    def assertError(self, *args: Any, **kwargs: Any) -> None:
        # Assert that the call raises getopt.GetoptError.
        # JLe: work around mypy bug #229
        cast(Any, self.assertRaises)(getopt.GetoptError, *args, **kwargs)

    def test_short_has_arg(self) -> None:
        # 'a:' means -a takes an argument; unknown options raise.
        self.assertTrue(getopt.short_has_arg('a', 'a:'))
        self.assertFalse(getopt.short_has_arg('a', 'a'))
        self.assertError(getopt.short_has_arg, 'a', 'b')

    def test_long_has_args(self) -> None:
        # A trailing '=' in the spec marks an argument-taking long option;
        # unambiguous prefixes are completed to the full option name.
        has_arg, option = getopt.long_has_args('abc', ['abc='])
        self.assertTrue(has_arg)
        self.assertEqual(option, 'abc')

        has_arg, option = getopt.long_has_args('abc', ['abc'])
        self.assertFalse(has_arg)
        self.assertEqual(option, 'abc')

        has_arg, option = getopt.long_has_args('abc', ['abcd'])
        self.assertFalse(has_arg)
        self.assertEqual(option, 'abcd')

        # Unknown or ambiguous prefixes raise GetoptError.
        self.assertError(getopt.long_has_args, 'abc', ['def'])
        self.assertError(getopt.long_has_args, 'abc', [])
        self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde'])

    def test_do_shorts(self) -> None:
        opts, args = getopt.do_shorts([], 'a', 'a', [])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, [])

        # Argument attached directly to the option ('-a1').
        opts, args = getopt.do_shorts([], 'a1', 'a:', [])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, [])

        #opts, args = getopt.do_shorts([], 'a=1', 'a:', [])
        #self.assertEqual(opts, [('-a', '1')])
        #self.assertEqual(args, [])

        # Argument taken from the following word.
        opts, args = getopt.do_shorts([], 'a', 'a:', ['1'])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, [])

        opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2'])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, ['2'])

        # Unexpected or missing argument raises GetoptError.
        self.assertError(getopt.do_shorts, [], 'a1', 'a', [])
        self.assertError(getopt.do_shorts, [], 'a', 'a:', [])

    def test_do_longs(self) -> None:
        opts, args = getopt.do_longs([], 'abc', ['abc'], [])
        self.assertEqual(opts, [('--abc', '')])
        self.assertEqual(args, [])

        opts, args = getopt.do_longs([], 'abc=1', ['abc='], [])
        self.assertEqual(opts, [('--abc', '1')])
        self.assertEqual(args, [])

        # Prefix completion applies to the '=' form as well.
        opts, args = getopt.do_longs([], 'abc=1', ['abcd='], [])
        self.assertEqual(opts, [('--abcd', '1')])
        self.assertEqual(args, [])

        opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], [])
        self.assertEqual(opts, [('--abc', '')])
        self.assertEqual(args, [])

        # Much like the preceding, except with a non-alpha character ("-") in
        # option name that precedes "="; failed in
        # http://python.org/sf/126863
        opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], [])
        self.assertEqual(opts, [('--foo', '42')])
        self.assertEqual(args, [])

        self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], [])
        self.assertError(getopt.do_longs, [], 'abc', ['abc='], [])

    def test_getopt(self) -> None:
        # note: the empty string between '-a' and '--beta' is significant:
        # it simulates an empty string option argument ('-a ""') on the
        # command line.
        cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a',
                   '', '--beta', 'arg1', 'arg2']

        opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta'])
        self.assertEqual(opts, [('-a', '1'), ('-b', ''),
                                ('--alpha', '2'), ('--beta', ''),
                                ('-a', '3'), ('-a', ''), ('--beta', '')])
        # Note ambiguity of ('-b', '') and ('-a', '') above. This must be
        # accounted for in the code that calls getopt().
        self.assertEqual(args, ['arg1', 'arg2'])

        self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta'])

    def test_gnu_getopt(self) -> None:
        # Test handling of GNU style scanning mode.
        cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2']

        # GNU style: options may follow positional arguments.
        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
        self.assertEqual(args, ['arg1'])
        self.assertEqual(opts, [('-a', ''), ('-b', '1'),
                                ('--alpha', ''), ('--beta', '2')])

        # recognize "-" as an argument
        opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', [])
        self.assertEqual(args, ['-'])
        self.assertEqual(opts, [('-a', ''), ('-b', '-')])

        # Posix style via + : scanning stops at the first non-option.
        opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta='])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])

        # Posix style via POSIXLY_CORRECT
        self.env["POSIXLY_CORRECT"] = "1"
        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])

    def test_libref_examples(self) -> None:
        # The string below is executed as a doctest module; its content
        # must stay byte-exact for the expected outputs to match.
        s = """
        Examples from the Library Reference:  Doc/lib/libgetopt.tex

        An example using only Unix style options:


        >>> import getopt
        >>> args = '-a -b -cfoo -d bar a1 a2'.split()
        >>> args
        ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2']
        >>> optlist, args = getopt.getopt(args, 'abc:d:')
        >>> optlist
        [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
        >>> args
        ['a1', 'a2']

        Using long option names is equally easy:


        >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2'
        >>> args = s.split()
        >>> args
        ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2']
        >>> optlist, args = getopt.getopt(args, 'x', [
        ...     'condition=', 'output-file=', 'testing'])
        >>> optlist
        [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')]
        >>> args
        ['a1', 'a2']
        """

        import types
        m = types.ModuleType("libreftest", s)
        run_doctest(m, verbose)

    def test_issue4629(self) -> None:
        # '--help=' with an explicitly empty value is legal for an
        # argument-taking option, illegal for a flag-only one.
        longopts, shortopts = getopt.getopt(['--help='], '', ['help='])
        self.assertEqual(longopts, [('--help', '')])
        longopts, shortopts = getopt.getopt(['--help=x'], '', ['help='])
        self.assertEqual(longopts, [('--help', 'x')])
        self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help'])
+
def test_main() -> None:
    """Run the GetoptTests case via test.support."""
    run_unittest(GetoptTests)

if __name__ == "__main__":
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_glob.py b/test-data/stdlib-samples/3.2/test/test_glob.py
new file mode 100644
index 0000000..08c8932
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_glob.py
@@ -0,0 +1,122 @@
+import unittest
+from test.support import run_unittest, TESTFN, skip_unless_symlink, can_symlink
+import glob
+import os
+import shutil
+
+from typing import TypeVar, Iterable, List, cast
+
+T = TypeVar('T')
+
+class GlobTests(unittest.TestCase):
+
+    tempdir = ''
+
+    # JLe: work around mypy issue #231
+    def norm(self, first: str, *parts: str) -> str:
+        return os.path.normpath(os.path.join(self.tempdir, first, *parts))
+
+    def mktemp(self, *parts: str) -> None:
+        filename = self.norm(*parts)
+        base, file = os.path.split(filename)
+        if not os.path.exists(base):
+            os.makedirs(base)
+        f = open(filename, 'w')
+        f.close()
+
+    def setUp(self) -> None:
+        self.tempdir = TESTFN+"_dir"
+        self.mktemp('a', 'D')
+        self.mktemp('aab', 'F')
+        self.mktemp('aaa', 'zzzF')
+        self.mktemp('ZZZ')
+        self.mktemp('a', 'bcd', 'EF')
+        self.mktemp('a', 'bcd', 'efg', 'ha')
+        if can_symlink():
+            os.symlink(self.norm('broken'), self.norm('sym1'))
+            os.symlink(self.norm('broken'), self.norm('sym2'))
+
+    def tearDown(self) -> None:
+        shutil.rmtree(self.tempdir)
+
+    def glob(self, *parts: str) -> List[str]:
+        if len(parts) == 1:
+            pattern = parts[0]
+        else:
+            pattern = os.path.join(*parts)
+        p = os.path.join(self.tempdir, pattern)
+        res = glob.glob(p)
+        self.assertEqual(list(glob.iglob(p)), res)
+        return res
+
+    def assertSequencesEqual_noorder(self, l1: Iterable[T],
+                                     l2: Iterable[T]) -> None:
+        self.assertEqual(set(l1), set(l2))
+
+    def test_glob_literal(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('a'), [self.norm('a')])
+        eq(self.glob('a', 'D'), [self.norm('a', 'D')])
+        eq(self.glob('aab'), [self.norm('aab')])
+        eq(self.glob('zymurgy'), cast(List[str], []))  # JLe: work around #230
+
+        # test return types are unicode, but only if os.listdir
+        # returns unicode filenames
+        uniset = set([str])
+        tmp = os.listdir('.')
+        if set(type(x) for x in tmp) == uniset:
+            u1 = glob.glob('*')
+            u2 = glob.glob('./*')
+            self.assertEqual(set(type(r) for r in u1), uniset)
+            self.assertEqual(set(type(r) for r in u2), uniset)
+
+    def test_glob_one_directory(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa']))
+        eq(self.glob('*a'), map(self.norm, ['a', 'aaa']))
+        eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab']))
+        eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab']))
+        eq(self.glob('*q'), cast(List[str], []))  # JLe: work around #230
+
+    def test_glob_nested_directory(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        if os.path.normcase("abCD") == "abCD":
+            # case-sensitive filesystem
+            eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')])
+        else:
+            # case insensitive filesystem
+            eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'),
+                                             self.norm('a', 'bcd', 'efg')])
+        eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')])
+
+    def test_glob_directory_names(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('*', 'D'), [self.norm('a', 'D')])
+        eq(self.glob('*', '*a'), cast(List[str], []))  # JLe: work around #230
+        eq(self.glob('a', '*', '*', '*a'),
+           [self.norm('a', 'bcd', 'efg', 'ha')])
+        eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'),
+                                                   os.path.join('aab', 'F')]))
+
+    def test_glob_directory_with_trailing_slash(self) -> None:
+        # We are verifying that when there is wildcard pattern which
+        # ends with os.sep doesn't blow up.
+        res = glob.glob(self.tempdir + '*' + os.sep)
+        self.assertEqual(len(res), 1)
+        # either of these results are reasonable
+        self.assertIn(res[0], [self.tempdir, self.tempdir + os.sep])
+
+    @skip_unless_symlink
+    def test_glob_broken_symlinks(self) -> None:
+        eq = self.assertSequencesEqual_noorder
+        eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2')])
+        eq(self.glob('sym1'), [self.norm('sym1')])
+        eq(self.glob('sym2'), [self.norm('sym2')])
+
+
+def test_main() -> None:
+    run_unittest(GlobTests)
+
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_posixpath.py b/test-data/stdlib-samples/3.2/test/test_posixpath.py
new file mode 100644
index 0000000..de98975
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_posixpath.py
@@ -0,0 +1,531 @@
+import unittest
+from test import support, test_genericpath
+
+import posixpath
+import genericpath
+
+import imp
+imp.reload(posixpath) # Make sure we are using the local copy
+imp.reload(genericpath)
+
+import os
+import sys
+from posixpath import realpath, abspath, dirname, basename
+
+import posix
+from typing import cast, Any, TypeVar, Callable
+
+T = TypeVar('T')
+
+# An absolute path to a temporary filename for testing. We can't rely on TESTFN
+# being an absolute path, so we need this.
+
+ABSTFN = abspath(support.TESTFN)
+
+def skip_if_ABSTFN_contains_backslash(
+        test: Callable[[T], None]) -> Callable[[T], None]:
+    """
+    On Windows, posixpath.abspath still returns paths with backslashes
+    instead of posix forward slashes. If this is the case, several tests
+    fail, so skip them.
+    """
+    found_backslash = '\\' in ABSTFN
+    msg = "ABSTFN is not a posix path - tests fail"
+    return [test, unittest.skip(msg)(test)][found_backslash]
+
+def safe_rmdir(dirname: str) -> None:
+    try:
+        os.rmdir(dirname)
+    except OSError:
+        pass
+
+class PosixPathTest(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.tearDown()
+
+    def tearDown(self) -> None:
+        for suffix in ["", "1", "2"]:
+            support.unlink(support.TESTFN + suffix)
+            safe_rmdir(support.TESTFN + suffix)
+
+    def test_join(self) -> None:
+        self.assertEqual(posixpath.join("/foo", "bar", "/bar", "baz"),
+                         "/bar/baz")
+        self.assertEqual(posixpath.join("/foo", "bar", "baz"), "/foo/bar/baz")
+        self.assertEqual(posixpath.join("/foo/", "bar/", "baz/"),
+                         "/foo/bar/baz/")
+
+        self.assertEqual(posixpath.join(b"/foo", b"bar", b"/bar", b"baz"),
+                         b"/bar/baz")
+        self.assertEqual(posixpath.join(b"/foo", b"bar", b"baz"),
+                         b"/foo/bar/baz")
+        self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"),
+                         b"/foo/bar/baz/")
+
+        self.assertRaises(TypeError, posixpath.join, b"bytes", "str")
+        self.assertRaises(TypeError, posixpath.join, "str", b"bytes")
+
+    def test_split(self) -> None:
+        self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar"))
+        self.assertEqual(posixpath.split("/"), ("/", ""))
+        self.assertEqual(posixpath.split("foo"), ("", "foo"))
+        self.assertEqual(posixpath.split("////foo"), ("////", "foo"))
+        self.assertEqual(posixpath.split("//foo//bar"), ("//foo", "bar"))
+
+        self.assertEqual(posixpath.split(b"/foo/bar"), (b"/foo", b"bar"))
+        self.assertEqual(posixpath.split(b"/"), (b"/", b""))
+        self.assertEqual(posixpath.split(b"foo"), (b"", b"foo"))
+        self.assertEqual(posixpath.split(b"////foo"), (b"////", b"foo"))
+        self.assertEqual(posixpath.split(b"//foo//bar"), (b"//foo", b"bar"))
+
+    def splitextTest(self, path: str, filename: str, ext: str) -> None:
+        self.assertEqual(posixpath.splitext(path), (filename, ext))
+        self.assertEqual(posixpath.splitext("/" + path), ("/" + filename, ext))
+        self.assertEqual(posixpath.splitext("abc/" + path),
+                         ("abc/" + filename, ext))
+        self.assertEqual(posixpath.splitext("abc.def/" + path),
+                         ("abc.def/" + filename, ext))
+        self.assertEqual(posixpath.splitext("/abc.def/" + path),
+                         ("/abc.def/" + filename, ext))
+        self.assertEqual(posixpath.splitext(path + "/"),
+                         (filename + ext + "/", ""))
+
+        pathb = bytes(path, "ASCII")
+        filenameb = bytes(filename, "ASCII")
+        extb = bytes(ext, "ASCII")
+
+        self.assertEqual(posixpath.splitext(pathb), (filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"/" + pathb),
+                         (b"/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"abc/" + pathb),
+                         (b"abc/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"abc.def/" + pathb),
+                         (b"abc.def/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(b"/abc.def/" + pathb),
+                         (b"/abc.def/" + filenameb, extb))
+        self.assertEqual(posixpath.splitext(pathb + b"/"),
+                         (filenameb + extb + b"/", b""))
+
+    def test_splitext(self) -> None:
+        self.splitextTest("foo.bar", "foo", ".bar")
+        self.splitextTest("foo.boo.bar", "foo.boo", ".bar")
+        self.splitextTest("foo.boo.biff.bar", "foo.boo.biff", ".bar")
+        self.splitextTest(".csh.rc", ".csh", ".rc")
+        self.splitextTest("nodots", "nodots", "")
+        self.splitextTest(".cshrc", ".cshrc", "")
+        self.splitextTest("...manydots", "...manydots", "")
+        self.splitextTest("...manydots.ext", "...manydots", ".ext")
+        self.splitextTest(".", ".", "")
+        self.splitextTest("..", "..", "")
+        self.splitextTest("........", "........", "")
+        self.splitextTest("", "", "")
+
+    def test_isabs(self) -> None:
+        self.assertIs(posixpath.isabs(""), False)
+        self.assertIs(posixpath.isabs("/"), True)
+        self.assertIs(posixpath.isabs("/foo"), True)
+        self.assertIs(posixpath.isabs("/foo/bar"), True)
+        self.assertIs(posixpath.isabs("foo/bar"), False)
+
+        self.assertIs(posixpath.isabs(b""), False)
+        self.assertIs(posixpath.isabs(b"/"), True)
+        self.assertIs(posixpath.isabs(b"/foo"), True)
+        self.assertIs(posixpath.isabs(b"/foo/bar"), True)
+        self.assertIs(posixpath.isabs(b"foo/bar"), False)
+
+    def test_basename(self) -> None:
+        self.assertEqual(posixpath.basename("/foo/bar"), "bar")
+        self.assertEqual(posixpath.basename("/"), "")
+        self.assertEqual(posixpath.basename("foo"), "foo")
+        self.assertEqual(posixpath.basename("////foo"), "foo")
+        self.assertEqual(posixpath.basename("//foo//bar"), "bar")
+
+        self.assertEqual(posixpath.basename(b"/foo/bar"), b"bar")
+        self.assertEqual(posixpath.basename(b"/"), b"")
+        self.assertEqual(posixpath.basename(b"foo"), b"foo")
+        self.assertEqual(posixpath.basename(b"////foo"), b"foo")
+        self.assertEqual(posixpath.basename(b"//foo//bar"), b"bar")
+
+    def test_dirname(self) -> None:
+        self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
+        self.assertEqual(posixpath.dirname("/"), "/")
+        self.assertEqual(posixpath.dirname("foo"), "")
+        self.assertEqual(posixpath.dirname("////foo"), "////")
+        self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
+
+        self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo")
+        self.assertEqual(posixpath.dirname(b"/"), b"/")
+        self.assertEqual(posixpath.dirname(b"foo"), b"")
+        self.assertEqual(posixpath.dirname(b"////foo"), b"////")
+        self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo")
+
+    def test_islink(self) -> None:
+        self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
+        self.assertIs(posixpath.lexists(support.TESTFN + "2"), False)
+        f = open(support.TESTFN + "1", "wb")
+        try:
+            f.write(b"foo")
+            f.close()
+            self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
+            if support.can_symlink():
+                os.symlink(support.TESTFN + "1", support.TESTFN + "2")
+                self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
+                os.remove(support.TESTFN + "1")
+                self.assertIs(posixpath.islink(support.TESTFN + "2"), True)
+                self.assertIs(posixpath.exists(support.TESTFN + "2"), False)
+                self.assertIs(posixpath.lexists(support.TESTFN + "2"), True)
+        finally:
+            if not f.closed:
+                f.close()
+
+    @staticmethod
+    def _create_file(filename: str) -> None:
+        with open(filename, 'wb') as f:
+            f.write(b'foo')
+
+    def test_samefile(self) -> None:
+        test_fn = support.TESTFN + "1"
+        self._create_file(test_fn)
+        self.assertTrue(posixpath.samefile(test_fn, test_fn))
+        self.assertRaises(TypeError, posixpath.samefile)
+
+    @unittest.skipIf(
+        sys.platform.startswith('win'),
+        "posixpath.samefile does not work on links in Windows")
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    def test_samefile_on_links(self) -> None:
+        test_fn1 = support.TESTFN + "1"
+        test_fn2 = support.TESTFN + "2"
+        self._create_file(test_fn1)
+
+        os.symlink(test_fn1, test_fn2)
+        self.assertTrue(posixpath.samefile(test_fn1, test_fn2))
+        os.remove(test_fn2)
+
+        self._create_file(test_fn2)
+        self.assertFalse(posixpath.samefile(test_fn1, test_fn2))
+
+
+    def test_samestat(self) -> None:
+        test_fn = support.TESTFN + "1"
+        self._create_file(test_fn)
+        test_fns = [test_fn]*2
+        stats = map(os.stat, test_fns)
+        self.assertTrue(posixpath.samestat(*stats))
+
+    @unittest.skipIf(
+        sys.platform.startswith('win'),
+        "posixpath.samestat does not work on links in Windows")
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    def test_samestat_on_links(self) -> None:
+        test_fn1 = support.TESTFN + "1"
+        test_fn2 = support.TESTFN + "2"
+        self._create_file(test_fn1)
+        test_fns = [test_fn1, test_fn2]
+        cast(Any, os.symlink)(*test_fns)
+        stats = map(os.stat, test_fns)
+        self.assertTrue(posixpath.samestat(*stats))
+        os.remove(test_fn2)
+
+        self._create_file(test_fn2)
+        stats = map(os.stat, test_fns)
+        self.assertFalse(posixpath.samestat(*stats))
+
+        self.assertRaises(TypeError, posixpath.samestat)
+
+    def test_ismount(self) -> None:
+        self.assertIs(posixpath.ismount("/"), True)
+        self.assertIs(posixpath.ismount(b"/"), True)
+
+    def test_ismount_non_existent(self) -> None:
+        # Non-existent mountpoint.
+        self.assertIs(posixpath.ismount(ABSTFN), False)
+        try:
+            os.mkdir(ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(support.can_symlink(),
+                         "Test requires symlink support")
+    def test_ismount_symlinks(self) -> None:
+        # Symlinks are never mountpoints.
+        try:
+            os.symlink("/", ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            os.unlink(ABSTFN)
+
+    @unittest.skipIf(posix is None, "Test requires posix module")
+    def test_ismount_different_device(self) -> None:
+        # Simulate the path being on a different device from its parent by
+        # mocking out st_dev.
+        save_lstat = os.lstat
+        def fake_lstat(path):
+            st_ino = 0
+            st_dev = 0
+            if path == ABSTFN:
+                st_dev = 1
+                st_ino = 1
+            return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0))
+        try:
+            setattr(os, 'lstat', fake_lstat) # mypy: can't modify os directly
+            self.assertIs(posixpath.ismount(ABSTFN), True)
+        finally:
+            setattr(os, 'lstat', save_lstat)
+
+    def test_expanduser(self) -> None:
+        self.assertEqual(posixpath.expanduser("foo"), "foo")
+        self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
+        try:
+            import pwd
+        except ImportError:
+            pass
+        else:
+            self.assertIsInstance(posixpath.expanduser("~/"), str)
+            self.assertIsInstance(posixpath.expanduser(b"~/"), bytes)
+            # if home directory == root directory, this test makes no sense
+            if posixpath.expanduser("~") != '/':
+                self.assertEqual(
+                    posixpath.expanduser("~") + "/",
+                    posixpath.expanduser("~/")
+                )
+                self.assertEqual(
+                    posixpath.expanduser(b"~") + b"/",
+                    posixpath.expanduser(b"~/")
+                )
+            self.assertIsInstance(posixpath.expanduser("~root/"), str)
+            self.assertIsInstance(posixpath.expanduser("~foo/"), str)
+            self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes)
+            self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes)
+
+            with support.EnvironmentVarGuard() as env:
+                env['HOME'] = '/'
+                self.assertEqual(posixpath.expanduser("~"), "/")
+                # expanduser should fall back to using the password database
+                del env['HOME']
+                home = pwd.getpwuid(os.getuid()).pw_dir
+                self.assertEqual(posixpath.expanduser("~"), home)
+
+    def test_normpath(self) -> None:
+        self.assertEqual(posixpath.normpath(""), ".")
+        self.assertEqual(posixpath.normpath("/"), "/")
+        self.assertEqual(posixpath.normpath("//"), "//")
+        self.assertEqual(posixpath.normpath("///"), "/")
+        self.assertEqual(posixpath.normpath("///foo/.//bar//"), "/foo/bar")
+        self.assertEqual(posixpath.normpath("///foo/.//bar//.//..//.//baz"),
+                         "/foo/baz")
+        self.assertEqual(posixpath.normpath("///..//./foo/.//bar"), "/foo/bar")
+
+        self.assertEqual(posixpath.normpath(b""), b".")
+        self.assertEqual(posixpath.normpath(b"/"), b"/")
+        self.assertEqual(posixpath.normpath(b"//"), b"//")
+        self.assertEqual(posixpath.normpath(b"///"), b"/")
+        self.assertEqual(posixpath.normpath(b"///foo/.//bar//"), b"/foo/bar")
+        self.assertEqual(posixpath.normpath(b"///foo/.//bar//.//..//.//baz"),
+                         b"/foo/baz")
+        self.assertEqual(posixpath.normpath(b"///..//./foo/.//bar"),
+                         b"/foo/bar")
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_basic(self) -> None:
+        # Basic operation.
+        try:
+            os.symlink(ABSTFN+"1", ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+        finally:
+            support.unlink(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_relative(self) -> None:
+        try:
+            os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+        finally:
+            support.unlink(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_symlink_loops(self) -> None:
+        # Bug #930024, return the path unchanged if we get into an infinite
+        # symlink loop.
+        try:
+            old_path = abspath('.')
+            os.symlink(ABSTFN, ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN)
+
+            os.symlink(ABSTFN+"1", ABSTFN+"2")
+            os.symlink(ABSTFN+"2", ABSTFN+"1")
+            self.assertEqual(realpath(ABSTFN+"1"), ABSTFN+"1")
+            self.assertEqual(realpath(ABSTFN+"2"), ABSTFN+"2")
+
+            # Test using relative path as well.
+            os.chdir(dirname(ABSTFN))
+            self.assertEqual(realpath(basename(ABSTFN)), ABSTFN)
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN)
+            support.unlink(ABSTFN+"1")
+            support.unlink(ABSTFN+"2")
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_parents(self) -> None:
+        # We also need to resolve any symlinks in the parents of a relative
+        # path passed to realpath. E.g.: current working directory is
+        # /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
+        # realpath("a"). This should return /usr/share/doc/a/.
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/y")
+            os.symlink(ABSTFN + "/y", ABSTFN + "/k")
+
+            os.chdir(ABSTFN + "/k")
+            self.assertEqual(realpath("a"), ABSTFN + "/y/a")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "/k")
+            safe_rmdir(ABSTFN + "/y")
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_before_normalizing(self) -> None:
+        # Bug #990669: Symbolic links should be resolved before we
+        # normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
+        # in the following hierarchy:
+        # a/k/y
+        #
+        # and a symbolic link 'link-y' pointing to 'y' in directory 'a',
+        # then realpath("link-y/..") should return 'k', not 'a'.
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/k")
+            os.mkdir(ABSTFN + "/k/y")
+            os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
+
+            # Absolute path.
+            self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
+            # Relative path.
+            os.chdir(dirname(ABSTFN))
+            self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
+                             ABSTFN + "/k")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "/link-y")
+            safe_rmdir(ABSTFN + "/k/y")
+            safe_rmdir(ABSTFN + "/k")
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
+    def test_realpath_resolve_first(self) -> None:
+        # Bug #1213894: The first component of the path, if not absolute,
+        # must be resolved too.
+
+        try:
+            old_path = abspath('.')
+            os.mkdir(ABSTFN)
+            os.mkdir(ABSTFN + "/k")
+            os.symlink(ABSTFN, ABSTFN + "link")
+            os.chdir(dirname(ABSTFN))
+
+            base = basename(ABSTFN)
+            self.assertEqual(realpath(base + "link"), ABSTFN)
+            self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
+        finally:
+            os.chdir(old_path)
+            support.unlink(ABSTFN + "link")
+            safe_rmdir(ABSTFN + "/k")
+            safe_rmdir(ABSTFN)
+
+    def test_relpath(self) -> None:
+        real_getcwd = os.getcwd
+        # mypy: can't modify os directly
+        setattr(os, 'getcwd', lambda: r"/home/user/bar")
+        try:
+            curdir = os.path.split(os.getcwd())[-1]
+            self.assertRaises(ValueError, posixpath.relpath, "")
+            self.assertEqual(posixpath.relpath("a"), "a")
+            self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a")
+            self.assertEqual(posixpath.relpath("a/b"), "a/b")
+            self.assertEqual(posixpath.relpath("../a/b"), "../a/b")
+            self.assertEqual(posixpath.relpath("a", "../b"), "../"+curdir+"/a")
+            self.assertEqual(posixpath.relpath("a/b", "../c"),
+                             "../"+curdir+"/a/b")
+            self.assertEqual(posixpath.relpath("a", "b/c"), "../../a")
+            self.assertEqual(posixpath.relpath("a", "a"), ".")
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x/y/z"), '../../../foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/foo/bar"), 'bat')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/"), 'foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/", "/foo/bar/bat"), '../../..')
+            self.assertEqual(posixpath.relpath("/foo/bar/bat", "/x"), '../foo/bar/bat')
+            self.assertEqual(posixpath.relpath("/x", "/foo/bar/bat"), '../../../x')
+            self.assertEqual(posixpath.relpath("/", "/"), '.')
+            self.assertEqual(posixpath.relpath("/a", "/a"), '.')
+            self.assertEqual(posixpath.relpath("/a/b", "/a/b"), '.')
+        finally:
+            setattr(os, 'getcwd', real_getcwd)
+
+    def test_relpath_bytes(self) -> None:
+        real_getcwdb = os.getcwdb
+        # mypy: can't modify os directly
+        setattr(os, 'getcwdb', lambda: br"/home/user/bar")
+        try:
+            curdir = os.path.split(os.getcwdb())[-1]
+            self.assertRaises(ValueError, posixpath.relpath, b"")
+            self.assertEqual(posixpath.relpath(b"a"), b"a")
+            self.assertEqual(posixpath.relpath(posixpath.abspath(b"a")), b"a")
+            self.assertEqual(posixpath.relpath(b"a/b"), b"a/b")
+            self.assertEqual(posixpath.relpath(b"../a/b"), b"../a/b")
+            self.assertEqual(posixpath.relpath(b"a", b"../b"),
+                             b"../"+curdir+b"/a")
+            self.assertEqual(posixpath.relpath(b"a/b", b"../c"),
+                             b"../"+curdir+b"/a/b")
+            self.assertEqual(posixpath.relpath(b"a", b"b/c"), b"../../a")
+            self.assertEqual(posixpath.relpath(b"a", b"a"), b".")
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x/y/z"), b'../../../foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/foo/bar"), b'bat')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/"), b'foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/", b"/foo/bar/bat"), b'../../..')
+            self.assertEqual(posixpath.relpath(b"/foo/bar/bat", b"/x"), b'../foo/bar/bat')
+            self.assertEqual(posixpath.relpath(b"/x", b"/foo/bar/bat"), b'../../../x')
+            self.assertEqual(posixpath.relpath(b"/", b"/"), b'.')
+            self.assertEqual(posixpath.relpath(b"/a", b"/a"), b'.')
+            self.assertEqual(posixpath.relpath(b"/a/b", b"/a/b"), b'.')
+
+            self.assertRaises(TypeError, posixpath.relpath, b"bytes", "str")
+            self.assertRaises(TypeError, posixpath.relpath, "str", b"bytes")
+        finally:
+            setattr(os, 'getcwdb', real_getcwdb)
+
+    def test_sameopenfile(self) -> None:
+        fname = support.TESTFN + "1"
+        with open(fname, "wb") as a, open(fname, "wb") as b:
+            self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno()))
+
+
+class PosixCommonTest(test_genericpath.CommonTest):
+    pathmodule = posixpath
+    attributes = ['relpath', 'samefile', 'sameopenfile', 'samestat']
+
+
+def test_main() -> None:
+    support.run_unittest(PosixPathTest, PosixCommonTest)
+
+
+if __name__=="__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_pprint.py b/test-data/stdlib-samples/3.2/test/test_pprint.py
new file mode 100644
index 0000000..cf54ebd
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_pprint.py
@@ -0,0 +1,488 @@
+import pprint
+import test.support
+import unittest
+import test.test_set
+import random
+import collections
+import itertools
+
+from typing import List, Any, Dict, Tuple, cast, Callable
+
+# list, tuple and dict subclasses that do or don't overwrite __repr__
+class list2(list):
+    pass
+
+class list3(list):
+    def __repr__(self) -> str:
+        return list.__repr__(self)
+
+class tuple2(tuple):
+    pass
+
+class tuple3(tuple):
+    def __repr__(self) -> str:
+        return tuple.__repr__(self)
+
+class dict2(dict):
+    pass
+
+class dict3(dict):
+    def __repr__(self) -> str:
+        return dict.__repr__(self)
+
+class Unorderable:
+    def __repr__(self) -> str:
+        return str(id(self))
+
+class QueryTestCase(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.a = list(range(100))  # type: List[Any]
+        self.b = list(range(200))  # type: List[Any]
+        self.a[-12] = self.b
+
+    def test_basic(self) -> None:
+        # Verify .isrecursive() and .isreadable() w/o recursion
+        pp = pprint.PrettyPrinter()
+        for safe in (2, 2.0, complex(0.0, 2.0), "abc", [3], (2,2), {3: 3}, "yaddayadda",
+                     self.a, self.b):
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pprint.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pp.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+
+    def test_knotted(self) -> None:
+        # Verify .isrecursive() and .isreadable() w/ recursion
+        # Tie a knot.
+        self.b[67] = self.a
+        # Messy dict.
+        self.d = {}  # type: Dict[int, dict]
+        self.d[0] = self.d[1] = self.d[2] = self.d
+
+        pp = pprint.PrettyPrinter()
+
+        for icky in self.a, self.b, self.d, (self.d, self.d):
+            self.assertTrue(pprint.isrecursive(icky), "expected isrecursive")
+            self.assertFalse(pprint.isreadable(icky), "expected not isreadable")
+            self.assertTrue(pp.isrecursive(icky), "expected isrecursive")
+            self.assertFalse(pp.isreadable(icky), "expected not isreadable")
+
+        # Break the cycles.
+        self.d.clear()
+        del self.a[:]
+        del self.b[:]
+
+        for safe in self.a, self.b, self.d, (self.d, self.d):
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pprint.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(safe),
+                             "expected not isrecursive for %r" % (safe,))
+            self.assertTrue(pp.isreadable(safe),
+                            "expected isreadable for %r" % (safe,))
+
+    def test_unreadable(self) -> None:
+        # Not recursive but not readable anyway
+        pp = pprint.PrettyPrinter()
+        for unreadable in type(3), pprint, pprint.isrecursive:
+            # module-level convenience functions
+            self.assertFalse(pprint.isrecursive(unreadable),
+                             "expected not isrecursive for %r" % (unreadable,))
+            self.assertFalse(pprint.isreadable(unreadable),
+                             "expected not isreadable for %r" % (unreadable,))
+            # PrettyPrinter methods
+            self.assertFalse(pp.isrecursive(unreadable),
+                             "expected not isrecursive for %r" % (unreadable,))
+            self.assertFalse(pp.isreadable(unreadable),
+                             "expected not isreadable for %r" % (unreadable,))
+
+    def test_same_as_repr(self) -> None:
+        # Simple objects, small containers and classes that overwrite __repr__
+        # For those the result should be the same as repr().
+        # Ahem.  The docs don't say anything about that -- this appears to
+        # be testing an implementation quirk.  Starting in Python 2.5, it's
+        # not true for dicts:  pprint always sorts dicts by key now; before,
+        # it sorted a dict display if and only if the display required
+        # multiple lines.  For that reason, dicts with more than one element
+        # aren't tested here.
+        for simple in (0, 0, complex(0.0), 0.0, "", b"",
+                       (), tuple2(), tuple3(),
+                       [], list2(), list3(),
+                       {}, dict2(), dict3(),
+                       self.assertTrue, pprint,
+                       -6, -6, complex(-6.,-6.), -1.5, "x", b"x", (3,), [3], {3: 6},
+                       (1,2), [3,4], {5: 6},
+                       tuple2((1,2)), tuple3((1,2)), tuple3(range(100)),  # type: ignore
+                       [3,4], list2(cast(Any, [3,4])), list3(cast(Any, [3,4])),
+                       list3(cast(Any, range(100))), dict2(cast(Any, {5: 6})),
+                       dict3(cast(Any, {5: 6})), # JLe: work around mypy issue #233
+                       range(10, -11, -1)
+                      ):
+            native = repr(simple)
+            for function in "pformat", "saferepr":
+                f = getattr(pprint, function)
+                got = f(simple)
+                self.assertEqual(native, got,
+                                 "expected %s got %s from pprint.%s" %
+                                 (native, got, function))
+
+    def test_basic_line_wrap(self) -> None:
+        # verify basic line-wrapping operation
+        o = {'RPM_cal': 0,
+             'RPM_cal2': 48059,
+             'Speed_cal': 0,
+             'controldesk_runtime_us': 0,
+             'main_code_runtime_us': 0,
+             'read_io_runtime_us': 0,
+             'write_io_runtime_us': 43690}
+        exp = """\
+{'RPM_cal': 0,
+ 'RPM_cal2': 48059,
+ 'Speed_cal': 0,
+ 'controldesk_runtime_us': 0,
+ 'main_code_runtime_us': 0,
+ 'read_io_runtime_us': 0,
+ 'write_io_runtime_us': 43690}"""
+        # JLe: work around mypy issue #232
+        for type in cast(List[Any], [dict, dict2]):
+            self.assertEqual(pprint.pformat(type(o)), exp)
+
+        o2 = range(100)
+        exp = '[%s]' % ',\n '.join(map(str, o2))
+        for type in cast(List[Any], [list, list2]):
+            self.assertEqual(pprint.pformat(type(o2)), exp)
+
+        o3 = tuple(range(100))
+        exp = '(%s)' % ',\n '.join(map(str, o3))
+        for type in cast(List[Any], [tuple, tuple2]):
+            self.assertEqual(pprint.pformat(type(o3)), exp)
+
+        # indent parameter
+        o4 = range(100)
+        exp = '[   %s]' % ',\n    '.join(map(str, o4))
+        for type in cast(List[Any], [list, list2]):
+            self.assertEqual(pprint.pformat(type(o4), indent=4), exp)
+
+    def test_nested_indentations(self) -> None:
+        o1 = list(range(10))
+        o2 = {'first':1, 'second':2, 'third':3}
+        o = [o1, o2]
+        expected = """\
+[   [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+    {   'first': 1,
+        'second': 2,
+        'third': 3}]"""
+        self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
+
+    def test_sorted_dict(self) -> None:
+        # Starting in Python 2.5, pprint sorts dict displays by key regardless
+        # of how small the dictionary may be.
+        # Before the change, on 32-bit Windows pformat() gave order
+        # 'a', 'c', 'b' here, so this test failed.
+        d = {'a': 1, 'b': 1, 'c': 1}
+        self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}")
+        self.assertEqual(pprint.pformat([d, d]),
+            "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]")
+
+        # The next one is kind of goofy.  The sorted order depends on the
+        # alphabetic order of type names:  "int" < "str" < "tuple".  Before
+        # Python 2.5, this was in the test_same_as_repr() test.  It's worth
+        # keeping around for now because it's one of few tests of pprint
+        # against a crazy mix of types.
+        self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}),
+            r"{5: [[]], 'xy\tab\n': (3,), (): {}}")
+
+    def test_ordered_dict(self) -> None:
+        words = 'the quick brown fox jumped over a lazy dog'.split()
+        d = collections.OrderedDict(zip(words, itertools.count()))
+        self.assertEqual(pprint.pformat(d),
+"""\
+{'the': 0,
+ 'quick': 1,
+ 'brown': 2,
+ 'fox': 3,
+ 'jumped': 4,
+ 'over': 5,
+ 'a': 6,
+ 'lazy': 7,
+ 'dog': 8}""")
+    def test_subclassing(self) -> None:
+        o = {'names with spaces': 'should be presented using repr()',
+             'others.should.not.be': 'like.this'}
+        exp = """\
+{'names with spaces': 'should be presented using repr()',
+ others.should.not.be: like.this}"""
+        self.assertEqual(DottedPrettyPrinter().pformat(o), exp)
+
+    @test.support.cpython_only
+    def test_set_reprs(self) -> None:
+        # This test creates a complex arrangement of frozensets and
+        # compares the pretty-printed repr against a string hard-coded in
+        # the test.  The hard-coded repr depends on the sort order of
+        # frozensets.
+        #
+        # However, as the docs point out: "Since sets only define
+        # partial ordering (subset relationships), the output of the
+        # list.sort() method is undefined for lists of sets."
+        #
+        # In a nutshell, the test assumes frozenset({0}) will always
+        # sort before frozenset({1}), but:
+        #
+        # >>> frozenset({0}) < frozenset({1})
+        # False
+        # >>> frozenset({1}) < frozenset({0})
+        # False
+        #
+        # Consequently, this test is fragile and
+        # implementation-dependent.  Small changes to Python's sort
+        # algorithm cause the test to fail when it should pass.
+
+        self.assertEqual(pprint.pformat(set()), 'set()')
+        self.assertEqual(pprint.pformat(set(range(3))), '{0, 1, 2}')
+        self.assertEqual(pprint.pformat(frozenset()), 'frozenset()')
+        self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset({0, 1, 2})')
+        cube_repr_tgt = """\
+{frozenset(): frozenset({frozenset({2}), frozenset({0}), frozenset({1})}),
+ frozenset({0}): frozenset({frozenset(),
+                            frozenset({0, 2}),
+                            frozenset({0, 1})}),
+ frozenset({1}): frozenset({frozenset(),
+                            frozenset({1, 2}),
+                            frozenset({0, 1})}),
+ frozenset({2}): frozenset({frozenset(),
+                            frozenset({1, 2}),
+                            frozenset({0, 2})}),
+ frozenset({1, 2}): frozenset({frozenset({2}),
+                               frozenset({1}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 2}): frozenset({frozenset({2}),
+                               frozenset({0}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 1}): frozenset({frozenset({0}),
+                               frozenset({1}),
+                               frozenset({0, 1, 2})}),
+ frozenset({0, 1, 2}): frozenset({frozenset({1, 2}),
+                                  frozenset({0, 2}),
+                                  frozenset({0, 1})})}"""
+        cube = test.test_set.cube(3)
+        self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
+        cubo_repr_tgt = """\
+{frozenset({frozenset({0, 2}), frozenset({0})}): frozenset({frozenset({frozenset({0,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  1})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({0})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({0,
+                                                                                  2})})}),
+ frozenset({frozenset({0, 1}), frozenset({1})}): frozenset({frozenset({frozenset({0,
+                                                                                  1}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  1})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({1})})}),
+ frozenset({frozenset({1, 2}), frozenset({1})}): frozenset({frozenset({frozenset({1,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({1})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({0,
+                                                                                  1})})}),
+ frozenset({frozenset({1, 2}), frozenset({2})}): frozenset({frozenset({frozenset({1,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({2})})}),
+ frozenset({frozenset(), frozenset({0})}): frozenset({frozenset({frozenset({0}),
+                                                                 frozenset({0,
+                                                                            1})}),
+                                                      frozenset({frozenset({0}),
+                                                                 frozenset({0,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({1})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({2})})}),
+ frozenset({frozenset(), frozenset({1})}): frozenset({frozenset({frozenset(),
+                                                                 frozenset({0})}),
+                                                      frozenset({frozenset({1}),
+                                                                 frozenset({1,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({2})}),
+                                                      frozenset({frozenset({1}),
+                                                                 frozenset({0,
+                                                                            1})})}),
+ frozenset({frozenset({2}), frozenset()}): frozenset({frozenset({frozenset({2}),
+                                                                 frozenset({1,
+                                                                            2})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({0})}),
+                                                      frozenset({frozenset(),
+                                                                 frozenset({1})}),
+                                                      frozenset({frozenset({2}),
+                                                                 frozenset({0,
+                                                                            2})})}),
+ frozenset({frozenset({0, 1, 2}), frozenset({0, 1})}): frozenset({frozenset({frozenset({1,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0}),
+                                                                             frozenset({0,
+                                                                                        1})}),
+                                                                  frozenset({frozenset({1}),
+                                                                             frozenset({0,
+                                                                                        1})})}),
+ frozenset({frozenset({0}), frozenset({0, 1})}): frozenset({frozenset({frozenset(),
+                                                                       frozenset({0})}),
+                                                            frozenset({frozenset({0,
+                                                                                  1}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset({1}),
+                                                                       frozenset({0,
+                                                                                  1})})}),
+ frozenset({frozenset({2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({0,
+                                                                                  2}),
+                                                                       frozenset({0,
+                                                                                  1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({2}),
+                                                                       frozenset({1,
+                                                                                  2})}),
+                                                            frozenset({frozenset({0}),
+                                                                       frozenset({0,
+                                                                                  2})}),
+                                                            frozenset({frozenset(),
+                                                                       frozenset({2})})}),
+ frozenset({frozenset({0, 1, 2}), frozenset({0, 2})}): frozenset({frozenset({frozenset({1,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        1}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0}),
+                                                                             frozenset({0,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({2}),
+                                                                             frozenset({0,
+                                                                                        2})})}),
+ frozenset({frozenset({1, 2}), frozenset({0, 1, 2})}): frozenset({frozenset({frozenset({0,
+                                                                                        2}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({0,
+                                                                                        1}),
+                                                                             frozenset({0,
+                                                                                        1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({2}),
+                                                                             frozenset({1,
+                                                                                        2})}),
+                                                                  frozenset({frozenset({1}),
+                                                                             frozenset({1,
+                                                                                        2})})})}"""
+
+        cubo = test.test_set.linegraph(cube)
+        self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
+
+    def test_depth(self) -> None:
+        nested_tuple = (1, (2, (3, (4, (5, 6)))))
+        nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}}
+        nested_list = [1, [2, [3, [4, [5, [6, []]]]]]]
+        self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple))
+        self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict))
+        self.assertEqual(pprint.pformat(nested_list), repr(nested_list))
+
+        lv1_tuple = '(1, (...))'
+        lv1_dict = '{1: {...}}'
+        lv1_list = '[1, [...]]'
+        self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple)
+        self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict)
+        self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list)
+
+    def test_sort_unorderable_values(self) -> None:
+        # Issue 3976:  sorted pprints fail for unorderable values.
+        n = 20
+        keys = [Unorderable() for i in range(n)]
+        random.shuffle(keys)
+        skeys = sorted(keys, key=id)
+        clean = lambda s: s.replace(' ', '').replace('\n','')  # type: Callable[[str], str]
+
+        self.assertEqual(clean(pprint.pformat(set(keys))),
+            '{' + ','.join(map(repr, skeys)) + '}')
+        self.assertEqual(clean(pprint.pformat(frozenset(keys))),
+            'frozenset({' + ','.join(map(repr, skeys)) + '})')
+        self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))),
+            '{' + ','.join('%r:None' % k for k in skeys) + '}')
+
+class DottedPrettyPrinter(pprint.PrettyPrinter):
+
+    def format(self, object: object, context: Dict[int, Any], maxlevels: int,
+               level: int) -> Tuple[str, int, int]:
+        if isinstance(object, str):
+            if ' ' in object:
+                return repr(object), 1, 0
+            else:
+                return object, 0, 0
+        else:
+            return pprint.PrettyPrinter.format(
+                self, object, context, maxlevels, level)
+
+
+def test_main() -> None:
+    test.support.run_unittest(QueryTestCase)
+
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_random.py b/test-data/stdlib-samples/3.2/test/test_random.py
new file mode 100644
index 0000000..5989cee
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_random.py
@@ -0,0 +1,533 @@
+#!/usr/bin/env python3
+
+import unittest
+import random
+import time
+import pickle
+import warnings
+from math import log, exp, pi, fsum, sin
+from test import support
+
+from typing import Any, Dict, List, Callable, Generic, TypeVar, cast
+
# Constrained type variable: the generator under test is either random.Random
# or random.SystemRandom, letting TestBasicOps be generic over the RNG type.
RT = TypeVar('RT', random.Random, random.SystemRandom)
+
class TestBasicOps(unittest.TestCase, Generic[RT]):
    # Superclass with tests common to all generators.
    # Subclasses must arrange for self.gen to retrieve the Random instance
    # to be tested.

    gen = None  # type: RT  # Either Random or SystemRandom

    def randomlist(self, n: int) -> List[float]:
        """Helper function to make a list of random numbers"""
        return [self.gen.random() for i in range(n)]

    def test_autoseed(self) -> None:
        # Seeding with no argument should pick a fresh seed each time, so two
        # seedings performed at different times must yield different states.
        self.gen.seed()
        state1 = self.gen.getstate()
        time.sleep(0.1)
        self.gen.seed()      # different seeds at different times
        state2 = self.gen.getstate()
        self.assertNotEqual(state1, state2)

    def test_saverestore(self) -> None:
        # getstate()/setstate() must round-trip: restoring a saved state
        # regenerates exactly the same sequence of outputs.
        N = 1000
        self.gen.seed()
        state = self.gen.getstate()
        randseq = self.randomlist(N)
        self.gen.setstate(state)    # should regenerate the same sequence
        self.assertEqual(randseq, self.randomlist(N))

    def test_seedargs(self) -> None:
        # seed() accepts a range of hashable values; unhashable arguments and
        # extra positional arguments must raise TypeError.
        for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20),
                    3.14, complex(1., 2.), 'a', tuple('abc')]:
            self.gen.seed(arg)
        for arg in [list(range(3)), {'one': 1}]:
            self.assertRaises(TypeError, self.gen.seed, arg)
        self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
        self.assertRaises(TypeError, type(self.gen), [])  # type: ignore  # mypy issue 1846

    def test_choice(self) -> None:
        # choice() raises IndexError on an empty sequence and returns a
        # member of the sequence otherwise.
        choice = self.gen.choice
        with self.assertRaises(IndexError):
            choice([])
        self.assertEqual(choice([50]), 50)
        self.assertIn(choice([25, 75]), [25, 75])

    def test_sample(self) -> None:
        # For the entire allowable range of 0 <= k <= N, validate that
        # the sample is of the correct length and contains only unique items
        N = 100
        population = range(N)
        for k in range(N+1):
            s = self.gen.sample(population, k)
            self.assertEqual(len(s), k)
            uniq = set(s)
            self.assertEqual(len(uniq), k)
            self.assertTrue(uniq <= set(population))
        self.assertEqual(self.gen.sample([], 0), [])  # test edge case N==k==0

    def test_sample_distribution(self) -> None:
        # For the entire allowable range of 0 <= k <= N, validate that
        # sample generates all possible permutations
        n = 5
        pop = range(n)
        trials = 10000  # large num prevents false negatives without slowing normal case
        def factorial(n: int) -> int:
            # Recursive n!; inputs here are tiny (n <= 5).
            if n == 0:
                return 1
            return n * factorial(n - 1)
        for k in range(n):
            expected = factorial(n) // factorial(n-k)
            perms = {}  # type: Dict[tuple, object]
            for i in range(trials):
                perms[tuple(self.gen.sample(pop, k))] = None
                if len(perms) == expected:
                    break
            else:
                # Some k-permutation never appeared within `trials` draws.
                self.fail()

    def test_sample_inputs(self) -> None:
        # SF bug #801342 -- population can be any iterable defining __len__()
        self.gen.sample(set(range(20)), 2)
        self.gen.sample(range(20), 2)
        self.gen.sample(range(20), 2)
        self.gen.sample(str('abcdefghijklmnopqrst'), 2)
        self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)

    def test_sample_on_dicts(self) -> None:
        # Sampling directly from a dict is rejected.
        self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2)

    def test_gauss(self) -> None:
        # Ensure that the seed() method initializes all the hidden state.  In
        # particular, through 2.2.1 it failed to reset a piece of state used
        # by (and only by) the .gauss() method.

        for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
            self.gen.seed(seed)
            x1 = self.gen.random()
            y1 = self.gen.gauss(0, 1)

            self.gen.seed(seed)
            x2 = self.gen.random()
            y2 = self.gen.gauss(0, 1)

            self.assertEqual(x1, x2)
            self.assertEqual(y1, y2)

    def test_pickling(self) -> None:
        # A pickled-and-restored generator must continue the same sequence
        # the original produces from the captured state.
        state = pickle.dumps(self.gen)
        origseq = [self.gen.random() for i in range(10)]
        newgen = pickle.loads(state)
        restoredseq = [newgen.random() for i in range(10)]
        self.assertEqual(origseq, restoredseq)

    def test_bug_1727780(self) -> None:
        # verify that version-2-pickles can be loaded
        # fine, whether they are created on 32-bit or 64-bit
        # platforms, and that version-3-pickles load fine.
        files = [("randv2_32.pck", 780),
                 ("randv2_64.pck", 866),
                 ("randv3.pck", 343)]
        for file, value in files:
            f = open(support.findfile(file),"rb")
            r = pickle.load(f)
            f.close()
            self.assertEqual(int(r.random()*1000), value)

    def test_bug_9025(self) -> None:
        # Had problem with an uneven distribution in int(n*random())
        # Verify the fix by checking that distributions fall within expectations.
        n = 100000
        randrange = self.gen.randrange
        k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n))
        self.assertTrue(0.30 < k/n and k/n < .37, (k/n))
+
class SystemRandom_TestBasicOps(TestBasicOps[random.SystemRandom]):
    # Specialization for SystemRandom: OS entropy has no save/restore-able
    # state, so the state-related base-class tests are overridden.
    gen = random.SystemRandom()

    def test_autoseed(self) -> None:
        # Doesn't need to do anything except not fail
        self.gen.seed()

    def test_saverestore(self) -> None:
        # SystemRandom has no state to save or restore.
        self.assertRaises(NotImplementedError, self.gen.getstate)
        self.assertRaises(NotImplementedError, self.gen.setstate, None)

    def test_seedargs(self) -> None:
        # Doesn't need to do anything except not fail
        self.gen.seed(100)

    def test_gauss(self) -> None:
        # seed() must clear the cached second gauss() variate.
        self.gen.gauss_next = None
        self.gen.seed(100)
        self.assertEqual(self.gen.gauss_next, None)

    def test_pickling(self) -> None:
        self.assertRaises(NotImplementedError, pickle.dumps, self.gen)

    def test_53_bits_per_float(self) -> None:
        # This should pass whenever a C double has 53 bit precision.
        span = 2 ** 53 # type: int
        cum = 0
        for i in range(100):
            cum |= int(self.gen.random() * span)
        self.assertEqual(cum, span-1)

    def test_bigrand(self) -> None:
        # The randrange routine should build-up the required number of bits
        # in stages so that all bit positions are active.
        span = 2 ** 500 # type: int
        cum = 0
        for i in range(100):
            r = self.gen.randrange(span)
            self.assertTrue(0 <= r < span)
            cum |= r
        self.assertEqual(cum, span-1)

    def test_bigrand_ranges(self) -> None:
        for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
            start = self.gen.randrange(2 ** i)
            stop = self.gen.randrange(2 ** (i-2))
            if stop <= start:
                # NOTE(review): this returns (ending the test early) rather
                # than `continue`, so later widths are skipped in that case.
                return
            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)

    def test_rangelimits(self) -> None:
        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
            self.assertEqual(set(range(start,stop)),
                set([self.gen.randrange(start,stop) for i in range(100)]))

    def test_genrandbits(self) -> None:
        # Verify ranges
        for k in range(1, 1000):
            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)

        # Verify all bits active
        getbits = self.gen.getrandbits
        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
            cum = 0
            for i in range(100):
                cum |= getbits(span)
            self.assertEqual(cum, 2**span-1)

        # Verify argument checking
        self.assertRaises(TypeError, self.gen.getrandbits)
        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
        self.assertRaises(ValueError, self.gen.getrandbits, 0)
        self.assertRaises(ValueError, self.gen.getrandbits, -1)
        self.assertRaises(TypeError, self.gen.getrandbits, 10.1)

    # The `int` parameter deliberately shadows the builtin so the conversion
    # used by the test is an explicit, injectable dependency.
    def test_randbelow_logic(self, _log: Callable[[float, float], float] = log,
                             int: Callable[[float], int] = int) -> None:
        # check bitcount transition points:  2**i and 2**(i+1)-1
        # show that: k = int(1.001 + _log(n, 2))
        # is equal to or one greater than the number of bits in n
        for i in range(1, 1000):
            n = 1 << i # check an exact power of two
            numbits = i+1
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)
            self.assertEqual(n, 2**(k-1))

            n += n - 1      # check 1 below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertIn(k, [numbits, numbits+1])
            self.assertTrue(2**k > n > 2**(k-2))

            n -= n >> 15     # check a little farther below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)        # note the stronger assertion
            self.assertTrue(2**k > n > 2**(k-1))   # note the stronger assertion
+
+
class MersenneTwister_TestBasicOps(TestBasicOps[random.Random]):
    # Specialization for the default Mersenne Twister generator, adding
    # reproducibility checks against published reference output.
    gen = random.Random()

    def test_guaranteed_stable(self) -> None:
        # These sequences are guaranteed to stay the same across versions of python
        self.gen.seed(3456147, version=1)
        self.assertEqual([self.gen.random().hex() for i in range(4)],
            ['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1',
             '0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1'])
        self.gen.seed("the quick brown fox", version=2)
        self.assertEqual([self.gen.random().hex() for i in range(4)],
            ['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4',
             '0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1'])

    def test_setstate_first_arg(self) -> None:
        # The version field of the state tuple must be recognized.
        self.assertRaises(ValueError, self.gen.setstate, (1, None, None))

    def test_setstate_middle_arg(self) -> None:
        # Wrong type, s/b tuple
        self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
        # Wrong length, s/b 625
        self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
        # Wrong type, s/b tuple of 625 ints
        self.assertRaises(TypeError, self.gen.setstate, (2, tuple(['a',]*625), None))
        # Last element s/b an int also
        self.assertRaises(TypeError, self.gen.setstate, (2, cast(Any, (0,))*624+('a',), None))

    def test_referenceImplementation(self) -> None:
        # Compare the python implementation with results from the original
        # code.  Create 2000 53-bit precision random floats.  Compare only
        # the last ten entries to show that the independent implementations
        # are tracking.  Here is the main() function needed to create the
        # list of expected random numbers:
        #    void main(void){
        #         int i;
        #         unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
        #         init_by_array(init, length);
        #         for (i=0; i<2000; i++) {
        #           printf("%.15f ", genrand_res53());
        #           if (i%5==4) printf("\n");
        #         }
        #     }
        expected = [0.45839803073713259,
                    0.86057815201978782,
                    0.92848331726782152,
                    0.35932681119782461,
                    0.081823493762449573,
                    0.14332226470169329,
                    0.084297823823520024,
                    0.53814864671831453,
                    0.089215024911993401,
                    0.78486196105372907]

        # Same 4-word seed as the C init_by_array call, packed into one int.
        self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
        actual = self.randomlist(2000)[-10:]
        for a, e in zip(actual, expected):
            self.assertAlmostEqual(a,e,places=14)

    def test_strong_reference_implementation(self) -> None:
        # Like test_referenceImplementation, but checks for exact bit-level
        # equality.  This should pass on any box where C double contains
        # at least 53 bits of precision (the underlying algorithm suffers
        # no rounding errors -- all results are exact).
        from math import ldexp

        expected = [0x0eab3258d2231f,
                    0x1b89db315277a5,
                    0x1db622a5518016,
                    0x0b7f9af0d575bf,
                    0x029e4c4db82240,
                    0x04961892f5d673,
                    0x02b291598e4589,
                    0x11388382c15694,
                    0x02dad977c9e1fe,
                    0x191d96d4d334c6]
        self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
        actual = self.randomlist(2000)[-10:]
        for a, e in zip(actual, expected):
            # ldexp(a, 53) recovers the exact 53-bit integer behind the float.
            self.assertEqual(int(ldexp(a, 53)), e)

    def test_long_seed(self) -> None:
        # This is most interesting to run in debug mode, just to make sure
        # nothing blows up.  Under the covers, a dynamically resized array
        # is allocated, consuming space proportional to the number of bits
        # in the seed.  Unfortunately, that's a quadratic-time algorithm,
        # so don't make this horribly big.
        seed = (1 << (10000 * 8)) - 1  # about 10K bytes
        self.gen.seed(seed)

    def test_53_bits_per_float(self) -> None:
        # This should pass whenever a C double has 53 bit precision.
        span = 2 ** 53 # type: int
        cum = 0
        for i in range(100):
            cum |= int(self.gen.random() * span)
        self.assertEqual(cum, span-1)

    def test_bigrand(self) -> None:
        # The randrange routine should build-up the required number of bits
        # in stages so that all bit positions are active.
        span = 2 ** 500 # type: int
        cum = 0
        for i in range(100):
            r = self.gen.randrange(span)
            self.assertTrue(0 <= r < span)
            cum |= r
        self.assertEqual(cum, span-1)

    def test_bigrand_ranges(self) -> None:
        for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
            start = self.gen.randrange(2 ** i)
            stop = self.gen.randrange(2 ** (i-2))
            if stop <= start:
                # NOTE(review): this returns (ending the test early) rather
                # than `continue`, so later widths are skipped in that case.
                return
            self.assertTrue(start <= self.gen.randrange(start, stop) < stop)

    def test_rangelimits(self) -> None:
        for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
            self.assertEqual(set(range(start,stop)),
                set([self.gen.randrange(start,stop) for i in range(100)]))

    def test_genrandbits(self) -> None:
        # Verify cross-platform repeatability
        self.gen.seed(1234567)
        self.assertEqual(self.gen.getrandbits(100),
                         97904845777343510404718956115)
        # Verify ranges
        for k in range(1, 1000):
            self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)

        # Verify all bits active
        getbits = self.gen.getrandbits
        for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
            cum = 0
            for i in range(100):
                cum |= getbits(span)
            self.assertEqual(cum, 2**span-1)

        # Verify argument checking
        self.assertRaises(TypeError, self.gen.getrandbits)
        self.assertRaises(TypeError, self.gen.getrandbits, 'a')
        self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
        self.assertRaises(ValueError, self.gen.getrandbits, 0)
        self.assertRaises(ValueError, self.gen.getrandbits, -1)

    # The `int` parameter deliberately shadows the builtin so the conversion
    # used by the test is an explicit, injectable dependency.
    def test_randbelow_logic(self,
                             _log: Callable[[int, float], float] = log,
                             int: Callable[[float], int] = int) -> None:
        # check bitcount transition points:  2**i and 2**(i+1)-1
        # show that: k = int(1.001 + _log(n, 2))
        # is equal to or one greater than the number of bits in n
        for i in range(1, 1000):
            n = 1 << i # check an exact power of two
            numbits = i+1
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)
            self.assertEqual(n, 2**(k-1))

            n += n - 1      # check 1 below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertIn(k, [numbits, numbits+1])
            self.assertTrue(2**k > n > 2**(k-2))

            n -= n >> 15     # check a little farther below the next power of two
            k = int(1.00001 + _log(n, 2))
            self.assertEqual(k, numbits)        # note the stronger assertion
            self.assertTrue(2**k > n > 2**(k-1))   # note the stronger assertion

    def test_randrange_bug_1590891(self) -> None:
        # randrange must handle a huge negative span with a negative step.
        start = 1000000000000
        stop = -100000000000000000000
        step = -200
        x = self.gen.randrange(start, stop, step)
        self.assertTrue(stop < x <= start)
        self.assertEqual((x+stop)%step, 0)
+
def gamma(z: float, sqrt2pi: float = (2.0*pi)**0.5) -> float:
    """Gamma function via the Lanczos approximation with g=7.

    Used below to compute analytic means/variances of distributions.
    The default argument precomputes sqrt(2*pi) once at definition time.
    """
    if z < 0.5:
        # Left half plane: use the reflection formula
        # Gamma(z) * Gamma(1-z) = pi / sin(pi*z).
        return pi / sin(pi*z) / gamma(1.0-z)
    # Lanczos coefficients; the first is the constant term, each following
    # coefficient k is divided by (z + k - 1).
    coeffs = [
        0.9999999999995183,
        676.5203681218835,
        -1259.139216722289,
        771.3234287757674,
        -176.6150291498386,
        12.50734324009056,
        -0.1385710331296526,
        0.9934937113930748e-05,
        0.1659470187408462e-06,
    ]
    series = fsum([coeffs[0]] + [c / (z + k) for k, c in enumerate(coeffs[1:])])
    shifted = z + (7.0 - 0.5)
    return shifted ** (z-0.5) / exp(shifted) * sqrt2pi * series
+
class TestDistributions(unittest.TestCase):
    def test_zeroinputs(self) -> None:
        # Verify that distributions can handle a series of zero inputs
        g = random.Random()
        x = [g.random() for i in range(50)] + [0.0]*5
        def patch() -> None:
            # Replace g.random with pop() over a fresh copy of x; pop() takes
            # from the end, so the five 0.0 values are consumed first.
            setattr(g, 'random', x[:].pop)
        patch(); g.uniform(1.0,10.0)
        patch(); g.paretovariate(1.0)
        patch(); g.expovariate(1.0)
        patch(); g.weibullvariate(1.0, 1.0)
        patch(); g.normalvariate(0.0, 1.0)
        patch(); g.gauss(0.0, 1.0)
        patch(); g.lognormvariate(0.0, 1.0)
        patch(); g.vonmisesvariate(0.0, 1.0)
        patch(); g.gammavariate(0.01, 1.0)
        patch(); g.gammavariate(1.0, 1.0)
        patch(); g.gammavariate(200.0, 1.0)
        patch(); g.betavariate(3.0, 3.0)
        patch(); g.triangular(0.0, 1.0, 1.0/3.0)

    def test_avg_std(self) -> None:
        # Use integration to test distribution average and standard deviation.
        # Only works for distributions which do not consume variates in pairs
        g = random.Random()
        N = 5000
        # Evenly spaced values in (0, 1) served up in place of g.random().
        x = [i/float(N) for i in range(1,N)]
        variate = None  # type: Any
        for variate, args, mu, sigmasqrd in [
                (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
                (g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
                (g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
                (g.paretovariate, (5.0,), 5.0/(5.0-1),
                                  5.0/((5.0-1)**2*(5.0-2))),
                (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
                                  gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
            setattr(g, 'random', x[:].pop)
            y = []  # type: List[float]
            for i in range(len(x)):
                try:
                    y.append(variate(*args))
                except IndexError:
                    # Raised by pop() once the supply of inputs is exhausted.
                    pass
            s1 = s2 = 0.0
            for e in y:
                s1 += e
                s2 += (e - mu) ** 2
            # N is rebound here: sample mean/variance over the produced values.
            N = len(y)
            self.assertAlmostEqual(s1/N, mu, places=2)
            self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2)
+
class TestModule(unittest.TestCase):
    """Sanity checks on random-module constants and metadata."""

    def testMagicConstants(self) -> None:
        # Pin the precomputed module-level constants to their known values.
        for name, expected in [('NV_MAGICCONST', 1.71552776992141),
                               ('TWOPI', 6.28318530718),
                               ('LOG4', 1.38629436111989),
                               ('SG_MAGICCONST', 2.50407739677627)]:
            self.assertAlmostEqual(getattr(random, name), expected)

    def test__all__(self) -> None:
        # tests validity but not completeness of the __all__ list
        self.assertTrue(set(random.__all__) <= set(dir(random)))

    def test_random_subclass_with_kwargs(self) -> None:
        # SF bug #1486663 -- this used to erroneously raise a TypeError
        class Subclass(random.Random):
            def __init__(self, newarg: object = None) -> None:
                random.Random.__init__(self)
        Subclass(newarg=1)
+
+
def test_main(verbose: bool = False) -> None:
    """Run the random-module test suite.

    When *verbose* is true and the interpreter is a debug build (i.e.
    ``sys.gettotalrefcount`` exists), the suite is re-run several times and
    the total reference counts printed so leaks can be spotted.
    """
    # Default was `None` annotated as `bool`; False is annotation-consistent
    # and behaves identically (the value is only used for its truthiness).
    testclasses = [MersenneTwister_TestBasicOps,
                   TestDistributions,
                   TestModule]  # type: List[type]

    # SystemRandom needs an OS entropy source; include its tests only when
    # one is available on this platform.
    try:
        random.SystemRandom().random()
    except NotImplementedError:
        pass
    else:
        testclasses.append(SystemRandom_TestBasicOps)

    support.run_unittest(*testclasses)

    # verify reference counting (only observable on debug builds)
    import sys
    if verbose and hasattr(sys, "gettotalrefcount"):
        # Initialize with ints so the List[int] annotation is accurate; every
        # slot is overwritten before the list is printed.
        counts = [0] * 5  # type: List[int]
        for i in range(len(counts)):
            support.run_unittest(*testclasses)
            counts[i] = sys.gettotalrefcount()
        print(counts)
+
# When executed directly, also request the refcount check (debug builds only).
if __name__ == "__main__":
    test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_set.py b/test-data/stdlib-samples/3.2/test/test_set.py
new file mode 100644
index 0000000..23ae745
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_set.py
@@ -0,0 +1,1884 @@
+import unittest
+from test import support
+import gc
+import weakref
+import operator
+import copy
+import pickle
+from random import randrange, shuffle
+import sys
+import warnings
+import collections
+from typing import Set, Any
+
class PassThru(Exception):
    """Sentinel exception used to verify that exceptions raised inside an
    input iterable propagate out of set operations unchanged."""
    pass
+
def check_pass_thru():
    """Generator whose first advance raises PassThru.

    The unreachable ``yield`` makes this a generator function, so calling it
    succeeds and the exception only fires during iteration.
    """
    raise PassThru
    yield 1
+
class BadCmp:
    # Hashable object whose equality test always fails; used to check that
    # errors raised during set insertion/lookup propagate to the caller.
    def __hash__(self):
        # Constant hash guarantees collisions, forcing __eq__ to be called.
        return 1
    def __eq__(self, other):
        raise RuntimeError
+
class ReprWrapper:
    'Used to test self-referential repr() calls'
    def __repr__(self):
        # `value` is assigned by the test after construction.
        return repr(self.value)
+
+#class HashCountingInt(int):
+#    'int-like object that counts the number of times __hash__ is called'
+#    def __init__(self, *args):
+#        self.hash_count = 0
+#    def __hash__(self):
+#        self.hash_count += 1
+#        return int.__hash__(self)
+
+class TestJointOps(unittest.TestCase):
+    # Tests common to both set and frozenset
+
+    def setUp(self):
+        self.word = word = 'simsalabim'
+        self.otherword = 'madagascar'
+        self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+        self.s = self.thetype(word)
+        self.d = dict.fromkeys(word)
+
+    def test_new_or_init(self):
+        self.assertRaises(TypeError, self.thetype, [], 2)
+        self.assertRaises(TypeError, set().__init__, a=1)
+
+    def test_uniquification(self):
+        actual = sorted(self.s)
+        expected = sorted(self.d)
+        self.assertEqual(actual, expected)
+        self.assertRaises(PassThru, self.thetype, check_pass_thru())
+        self.assertRaises(TypeError, self.thetype, [[]])
+
+    def test_len(self):
+        self.assertEqual(len(self.s), len(self.d))
+
+    def test_contains(self):
+        for c in self.letters:
+            self.assertEqual(c in self.s, c in self.d)
+        self.assertRaises(TypeError, self.s.__contains__, [[]])
+        s = self.thetype([frozenset(self.letters)])
+        self.assertIn(self.thetype(self.letters), s)
+
+    def test_union(self):
+        u = self.s.union(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in u, c in self.d or c in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(u), self.basetype)
+        self.assertRaises(PassThru, self.s.union, check_pass_thru())
+        self.assertRaises(TypeError, self.s.union, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd'))
+            self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg'))
+            self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc'))
+            self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef'))
+            self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg'))
+
+        # Issue #6573
+        x = self.thetype()
+        self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2]))
+
+    def test_or(self):
+        i = self.s.union(self.otherword)
+        self.assertEqual(self.s | set(self.otherword), i)
+        self.assertEqual(self.s | frozenset(self.otherword), i)
+        try:
+            self.s | self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s|t did not screen-out general iterables")
+
+    def test_intersection(self):
+        i = self.s.intersection(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, c in self.d and c in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.intersection, check_pass_thru())
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc'))
+            self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set(''))
+            self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc'))
+            self.assertEqual(self.thetype('abcba').intersection(C('ef')), set(''))
+            self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b'))
+        s = self.thetype('abcba')
+        z = s.intersection()
+        if self.thetype == frozenset():
+            self.assertEqual(id(s), id(z))
+        else:
+            self.assertNotEqual(id(s), id(z))
+
+    def test_isdisjoint(self):
+        def f(s1, s2):
+            'Pure python equivalent of isdisjoint()'
+            return not set(s1).intersection(s2)
+        for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
+            s1 = self.thetype(larg)
+            for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
+                for C in set, frozenset, dict.fromkeys, str, list, tuple:
+                    s2 = C(rarg)
+                    actual = s1.isdisjoint(s2)
+                    expected = f(s1, s2)
+                    self.assertEqual(actual, expected)
+                    self.assertTrue(actual is True or actual is False)
+
+    def test_and(self):
+        i = self.s.intersection(self.otherword)
+        self.assertEqual(self.s & set(self.otherword), i)
+        self.assertEqual(self.s & frozenset(self.otherword), i)
+        try:
+            self.s & self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s&t did not screen-out general iterables")
+
+    def test_difference(self):
+        i = self.s.difference(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, c in self.d and c not in self.otherword)
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.difference, check_pass_thru())
+        self.assertRaises(TypeError, self.s.difference, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab'))
+            self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a'))
+            self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(), set('abc'))
+            self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c'))
+
+    def test_sub(self):
+        i = self.s.difference(self.otherword)
+        self.assertEqual(self.s - set(self.otherword), i)
+        self.assertEqual(self.s - frozenset(self.otherword), i)
+        try:
+            self.s - self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s-t did not screen-out general iterables")
+
+    def test_symmetric_difference(self):
+        i = self.s.symmetric_difference(self.otherword)
+        for c in self.letters:
+            self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword))
+        self.assertEqual(self.s, self.thetype(self.word))
+        self.assertEqual(type(i), self.basetype)
+        self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru())
+        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
+        for C in set, frozenset, dict.fromkeys, str, list, tuple:
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a'))
+            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef'))
+
+    def test_xor(self):
+        i = self.s.symmetric_difference(self.otherword)
+        self.assertEqual(self.s ^ set(self.otherword), i)
+        self.assertEqual(self.s ^ frozenset(self.otherword), i)
+        try:
+            self.s ^ self.otherword
+        except TypeError:
+            pass
+        else:
+            self.fail("s^t did not screen-out general iterables")
+
+    def test_equality(self):
+        self.assertEqual(self.s, set(self.word))
+        self.assertEqual(self.s, frozenset(self.word))
+        self.assertEqual(self.s == self.word, False)
+        self.assertNotEqual(self.s, set(self.otherword))
+        self.assertNotEqual(self.s, frozenset(self.otherword))
+        self.assertEqual(self.s != self.word, True)
+
+    def test_setOfFrozensets(self):
+        t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba'])
+        s = self.thetype(t)
+        self.assertEqual(len(s), 3)
+
+    def test_sub_and_super(self):
+        p, q, r = map(self.thetype, ['ab', 'abcde', 'def'])
+        self.assertTrue(p < q)
+        self.assertTrue(p <= q)
+        self.assertTrue(q <= q)
+        self.assertTrue(q > p)
+        self.assertTrue(q >= p)
+        self.assertFalse(q < r)
+        self.assertFalse(q <= r)
+        self.assertFalse(q > r)
+        self.assertFalse(q >= r)
+        self.assertTrue(set('a').issubset('abc'))
+        self.assertTrue(set('abc').issuperset('a'))
+        self.assertFalse(set('a').issubset('cbs'))
+        self.assertFalse(set('cbs').issuperset('a'))
+
    def test_pickling(self):
        # Round-trip the set under test through every pickle protocol.
        for i in range(pickle.HIGHEST_PROTOCOL + 1):
            p = pickle.dumps(self.s, i)
            dup = pickle.loads(p)
            self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup))
            # For subclasses, instance attributes must survive the round trip
            # too (plain set/frozenset have no instance __dict__ to check).
            if type(self.s) not in (set, frozenset):
                self.s.x = 10
                p = pickle.dumps(self.s)
                dup = pickle.loads(p)
                self.assertEqual(self.s.x, dup.x)
+
    def test_deepcopy(self):
        # Tracer increments its value on every __deepcopy__, so a deep copy
        # of a set containing a Tracer is detectable by value.
        class Tracer:
            def __init__(self, value):
                self.value = value
            def __hash__(self):
                return self.value
            def __deepcopy__(self, memo=None):
                return Tracer(self.value + 1)
        t = Tracer(10)
        s = self.thetype([t])
        dup = copy.deepcopy(s)
        self.assertNotEqual(id(s), id(dup))
        # The copied set holds a *new* Tracer whose value was bumped by one.
        for elem in dup:
            newt = elem
        self.assertNotEqual(id(t), id(newt))
        self.assertEqual(t.value + 1, newt.value)
+
    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        class A:
            pass
        # Each element points back at the containing set, at itself, and at
        # a singleton set of itself — three flavours of reference cycle.
        s = set(A() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = set([elem])
+
    def test_subclass_with_custom_hash(self):
        # Disabled in this port: the original test derived H from
        # self.thetype at runtime, which is not expressible here, so the
        # body below the raise is intentionally dead code kept for reference.
        raise NotImplementedError() # runtime computed base class below
        # Bug #1257731
        class H: # (self.thetype):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s=H()
        f=set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)
+
    def test_badcmp(self):
        # BadCmp is defined elsewhere in this file; presumably its equality
        # hook raises RuntimeError — TODO confirm against its definition.
        s = self.thetype([BadCmp()])
        # Detect comparison errors during insertion and lookup
        self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()])
        self.assertRaises(RuntimeError, s.__contains__, BadCmp())
        # Detect errors during mutating operations
        if hasattr(s, 'add'):
            self.assertRaises(RuntimeError, s.add, BadCmp())
            self.assertRaises(RuntimeError, s.discard, BadCmp())
            self.assertRaises(RuntimeError, s.remove, BadCmp())
+
    def test_cyclical_repr(self):
        # A set containing an object whose repr is the set itself must
        # render the cycle as '...' instead of recursing forever.
        w = ReprWrapper()
        s = self.thetype([w])
        w.value = s
        if self.thetype == set:
            self.assertEqual(repr(s), '{set(...)}')
        else:
            name = repr(s).partition('(')[0]    # strip class name
            self.assertEqual(repr(s), '%s({%s(...)})' % (name, name))
+
+    def test_cyclical_print(self):
+        w = ReprWrapper()
+        s = self.thetype([w])
+        w.value = s
+        fo = open(support.TESTFN, "w")
+        try:
+            fo.write(str(s))
+            fo.close()
+            fo = open(support.TESTFN, "r")
+            self.assertEqual(fo.read(), repr(s))
+        finally:
+            fo.close()
+            support.unlink(support.TESTFN)
+
    def test_do_not_rehash_dict_keys(self):
        # Disabled in this port: HashCountingInt subclassed int, which this
        # port cannot express, so everything after the raise is dead code
        # kept for reference (note d is stubbed to None below).
        raise NotImplementedError() # cannot subclass int
        n = 10
        d = None # dict.fromkeys(map(HashCountingInt, range(n)))
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        s = self.thetype(d)
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        s.difference(d)
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        if hasattr(s, 'symmetric_difference_update'):
            s.symmetric_difference_update(d)
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        d2 = dict.fromkeys(set(d))
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        d3 = dict.fromkeys(frozenset(d))
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        d3 = dict.fromkeys(frozenset(d), 123)
        self.assertEqual(sum(elem.hash_count for elem in d), n)
        self.assertEqual(d3, dict.fromkeys(d, 123))
+
    def test_container_iterator(self):
        # Bug #3680: tp_traverse was not implemented for set iterator object
        class C(object):
            pass
        obj = C()
        ref = weakref.ref(obj)
        # obj -> iterator -> container -> obj forms a cycle reachable only
        # through the iterator; collecting it requires iterator traversal.
        container = set([obj, 1])
        obj.x = iter(container)
        obj = None        # drop the only strong references,
        container = None  # leaving just the cycle
        gc.collect()
        self.assertTrue(ref() is None, "Cycle was not collected")
+
class TestSet(TestJointOps):
    """Tests specific to the mutable built-in set, layered on the shared
    TestJointOps suite (which supplies self.s, self.word, self.otherword)."""
    thetype = set
    basetype = set

    def test_init(self):
        # __init__ may be re-invoked on an existing set and replaces content.
        s = self.thetype()
        s.__init__(self.word)
        self.assertEqual(s, set(self.word))
        s.__init__(self.otherword)
        self.assertEqual(s, set(self.otherword))
        self.assertRaises(TypeError, s.__init__, s, 2);
        self.assertRaises(TypeError, s.__init__, 1)

    def test_constructor_identity(self):
        # set(set) copies: the result is a distinct object.
        s = self.thetype(range(3))
        t = self.thetype(s)
        self.assertNotEqual(id(s), id(t))

    def test_set_literal(self):
        # Placeholder: the set-literal syntax test is disabled in this port.
        raise NotImplementedError()
        #s = set([1,2,3])
        #t = {1,2,3}
        #self.assertEqual(s, t)

    def test_hash(self):
        # Mutable sets are unhashable.
        self.assertRaises(TypeError, hash, self.s)

    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, set())
        self.assertEqual(len(self.s), 0)

    def test_copy(self):
        # copy() is shallow but yields a new object of the base type.
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))
        self.assertEqual(type(dup), self.basetype)

    def test_add(self):
        # Adding a present element is a no-op; unhashables are rejected.
        self.s.add('Q')
        self.assertIn('Q', self.s)
        dup = self.s.copy()
        self.s.add('Q')
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])

    def test_remove(self):
        self.s.remove('a')
        self.assertNotIn('a', self.s)
        self.assertRaises(KeyError, self.s.remove, 'Q')
        self.assertRaises(TypeError, self.s.remove, [])
        # A set argument is coerced to frozenset for the membership test.
        s = self.thetype([frozenset(self.word)])
        self.assertIn(self.thetype(self.word), s)
        s.remove(self.thetype(self.word))
        self.assertNotIn(self.thetype(self.word), s)
        self.assertRaises(KeyError, self.s.remove, self.thetype(self.word))

    def test_remove_keyerror_unpacking(self):
        # bug:  www.python.org/sf/1576657
        # KeyError.args must carry the missing key itself, even a tuple key.
        for v1 in ['Q', (1,)]:
            try:
                self.s.remove(v1)
            except KeyError as e:
                v2 = e.args[0]
                self.assertEqual(v1, v2)
            else:
                self.fail()

    def test_remove_keyerror_set(self):
        # The KeyError must reference the very object passed in (identity).
        key = self.thetype([3, 4])
        try:
            self.s.remove(key)
        except KeyError as e:
            self.assertTrue(e.args[0] is key,
                         "KeyError should be {0}, not {1}".format(key,
                                                                  e.args[0]))
        else:
            self.fail()

    def test_discard(self):
        # discard never raises for a missing element.
        self.s.discard('a')
        self.assertNotIn('a', self.s)
        self.s.discard('Q')
        self.assertRaises(TypeError, self.s.discard, [])
        s = self.thetype([frozenset(self.word)])
        self.assertIn(self.thetype(self.word), s)
        s.discard(self.thetype(self.word))
        self.assertNotIn(self.thetype(self.word), s)
        s.discard(self.thetype(self.word))

    def test_pop(self):
        # pop drains the set one element at a time, then raises KeyError.
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)

    def test_update(self):
        # In-place union; returns None per the stdlib mutator convention.
        retval = self.s.update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            self.assertIn(c, self.s)
        self.assertRaises(PassThru, self.s.update, check_pass_thru())
        self.assertRaises(TypeError, self.s.update, [[]])
        # Accepts any iterable kind, and multiple arguments at once.
        for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.update(C(p)), None)
                self.assertEqual(s, set(q))
        for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'):
            q = 'ahi'
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.update(C(p), C(q)), None)
                self.assertEqual(s, set(s) | set(p) | set(q))

    def test_ior(self):
        self.s |= set(self.otherword)
        for c in (self.word + self.otherword):
            self.assertIn(c, self.s)

    def test_intersection_update(self):
        retval = self.s.intersection_update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            if c in self.otherword and c in self.word:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru())
        self.assertRaises(TypeError, self.s.intersection_update, [[]])
        for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.intersection_update(C(p)), None)
                self.assertEqual(s, set(q))
                ss = 'abcba'
                s = self.thetype(ss)
                t = 'cbc'
                self.assertEqual(s.intersection_update(C(p), C(t)), None)
                self.assertEqual(s, set('abcba')&set(p)&set(t))

    def test_iand(self):
        self.s &= set(self.otherword)
        for c in (self.word + self.otherword):
            if c in self.otherword and c in self.word:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_difference_update(self):
        retval = self.s.difference_update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            if c in self.word and c not in self.otherword:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(PassThru, self.s.difference_update, check_pass_thru())
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
        for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.difference_update(C(p)), None)
                self.assertEqual(s, set(q))

                # Zero arguments is a no-op.
                s = self.thetype('abcdefghih')
                s.difference_update()
                self.assertEqual(s, self.thetype('abcdefghih'))

                s = self.thetype('abcdefghih')
                s.difference_update(C('aba'))
                self.assertEqual(s, self.thetype('cdefghih'))

                # Multiple arguments are subtracted in sequence.
                s = self.thetype('abcdefghih')
                s.difference_update(C('cdc'), C('aba'))
                self.assertEqual(s, self.thetype('efghih'))

    def test_isub(self):
        self.s -= set(self.otherword)
        for c in (self.word + self.otherword):
            if c in self.word and c not in self.otherword:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            if (c in self.word) ^ (c in self.otherword):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru())
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
        for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.symmetric_difference_update(C(p)), None)
                self.assertEqual(s, set(q))

    def test_ixor(self):
        self.s ^= set(self.otherword)
        for c in (self.word + self.otherword):
            if (c in self.word) ^ (c in self.otherword):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)

    def test_inplace_on_self(self):
        # In-place ops where both operands are the same object.
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, self.thetype())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, self.thetype())

    def test_weakref(self):
        # Sets support weak references; the proxy dies with the set.
        s = self.thetype('gallahad')
        p = weakref.proxy(s)
        self.assertEqual(str(p), str(s))
        s = None
        self.assertRaises(ReferenceError, str, p)

    def test_rich_compare(self):
        class TestRichSetCompare:
            def __gt__(self, some_set):
                self.gt_called = True
                return False
            def __lt__(self, some_set):
                self.lt_called = True
                return False
            def __ge__(self, some_set):
                self.ge_called = True
                return False
            def __le__(self, some_set):
                self.le_called = True
                return False

        # This first tries the builtin rich set comparison, which doesn't know
        # how to handle the custom object. Upon returning NotImplemented, the
        # corresponding comparison on the right object is invoked.
        myset = {1, 2, 3}

        myobj = TestRichSetCompare()
        myset < myobj
        self.assertTrue(myobj.gt_called)

        myobj = TestRichSetCompare()
        myset > myobj
        self.assertTrue(myobj.lt_called)

        myobj = TestRichSetCompare()
        myset <= myobj
        self.assertTrue(myobj.ge_called)

        myobj = TestRichSetCompare()
        myset >= myobj
        self.assertTrue(myobj.le_called)

    # C API test only available in a debug build
    if hasattr(set, "test_c_api"):
        def test_c_api(self):
            self.assertEqual(set().test_c_api(), True)
+
class SetSubclass(set):
    # Trivial subclass used to verify set operations honour subclassing.
    pass
+
class TestSetSubclass(TestSet):
    # Re-run the full TestSet suite against a trivial set subclass;
    # copies are still expected to come back as plain set (basetype).
    thetype = SetSubclass
    basetype = set
+
class SetSubclassWithKeywordArgs(set):
    """Set subclass whose __init__ accepts an extra keyword argument.

    Exercises set.__init__ cooperation with subclass signatures
    (SF bug #1486663); *newarg* is deliberately ignored.
    """
    def __init__(self, iterable=(), newarg=None):
        # Empty tuple replaces the original mutable default ([]) — the
        # classic shared-default pitfall; behaviour is unchanged because
        # the default is only read, never mutated.
        set.__init__(self, iterable)
+
class TestSetSubclassWithKeywordArgs(TestSet):
    # NOTE(review): inherits thetype=set from TestSet rather than pointing
    # at SetSubclassWithKeywordArgs; only the test below targets the subclass.

    def test_keywords_in_subclass(self):
        'SF bug #1486663 -- this used to erroneously raise a TypeError'
        SetSubclassWithKeywordArgs(newarg=1)
+
class TestFrozenSet(TestJointOps):
    """Tests specific to the immutable frozenset, layered on TestJointOps."""
    thetype = frozenset
    basetype = frozenset

    def test_init(self):
        # __init__ on an existing frozenset is a no-op (immutability).
        s = self.thetype(self.word)
        s.__init__(self.otherword)
        self.assertEqual(s, set(self.word))

    def test_singleton_empty_frozenset(self):
        # The empty frozenset is a cached singleton in CPython.
        f = frozenset()
        efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''),
               frozenset(), frozenset([]), frozenset(()), frozenset(''),
               frozenset(range(0)), frozenset(frozenset()),
               frozenset(f), f]
        # All of the empty frozensets should have just one id()
        self.assertEqual(len(set(map(id, efs))), 1)

    def test_constructor_identity(self):
        # frozenset(frozenset) may return the argument itself (no copy needed).
        s = self.thetype(range(3))
        t = self.thetype(s)
        self.assertEqual(id(s), id(t))

    def test_hash(self):
        # Hash depends only on contents, not insertion order.
        self.assertEqual(hash(self.thetype('abcdeb')),
                         hash(self.thetype('ebecda')))

        # make sure that all permutations give the same hash value
        n = 100
        seq = [randrange(n) for i in range(n)]
        results = set()
        for i in range(200):
            shuffle(seq)
            results.add(hash(self.thetype(seq)))
        self.assertEqual(len(results), 1)

    def test_copy(self):
        # copy() of an immutable set returns the same object.
        dup = self.s.copy()
        self.assertEqual(id(self.s), id(dup))

    def test_frozen_as_dictkey(self):
        # Equal frozensets hash equal, so either works as the same dict key.
        seq = list(range(10)) + list('abcdefg') + ['apple']
        key1 = self.thetype(seq)
        key2 = self.thetype(reversed(seq))
        self.assertEqual(key1, key2)
        self.assertNotEqual(id(key1), id(key2))
        d = {}
        d[key1] = 42
        self.assertEqual(d[key2], 42)

    def test_hash_caching(self):
        # Repeated hash() calls must agree (the value is cached internally).
        f = self.thetype('abcdcda')
        self.assertEqual(hash(f), hash(f))

    def test_hash_effectiveness(self):
        # All 2**n subsets of an n-element universe must hash distinctly.
        n = 13
        hashvalues = set()
        addhashvalue = hashvalues.add
        elemmasks = [(i+1, 1<<i) for i in range(n)]
        for i in range(2**n):
            addhashvalue(hash(frozenset([e for e, m in elemmasks if m&i])))
        self.assertEqual(len(hashvalues), 2**n)
+
class FrozenSetSubclass(frozenset):
    # Trivial subclass used to verify frozenset operations honour subclassing.
    pass
+
class TestFrozenSetSubclass(TestFrozenSet):
    """Frozenset subclasses lose the identity shortcuts of the base type:
    construction and copy() must produce new objects, and the empty-set
    singleton caching does not apply."""
    thetype = FrozenSetSubclass
    basetype = frozenset

    def test_constructor_identity(self):
        # Unlike frozenset itself, a subclass constructor must copy.
        s = self.thetype(range(3))
        t = self.thetype(s)
        self.assertNotEqual(id(s), id(t))

    def test_copy(self):
        # Likewise, copy() of a subclass instance is a distinct object.
        dup = self.s.copy()
        self.assertNotEqual(id(self.s), id(dup))

    def test_nested_empty_constructor(self):
        s = self.thetype()
        t = self.thetype(s)
        self.assertEqual(s, t)

    def test_singleton_empty_frozenset(self):
        Frozenset = self.thetype
        f = frozenset()
        F = Frozenset()
        efs = [Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
               Frozenset(), Frozenset([]), Frozenset(()), Frozenset(''),
               Frozenset(range(0)), Frozenset(Frozenset()),
               Frozenset(frozenset()), f, F, Frozenset(f), Frozenset(F)]
        # All empty frozenset subclass instances should have different ids
        self.assertEqual(len(set(map(id, efs))), len(efs))
+
+# Tests taken from test_sets.py =============================================
+
+empty_set = set() # type: Any
+
+#==============================================================================
+
+class TestBasicOps(unittest.TestCase):
+
+    def test_repr(self):
+        if self.repr is not None:
+            self.assertEqual(repr(self.set), self.repr)
+
+    def check_repr_against_values(self):
+        text = repr(self.set)
+        self.assertTrue(text.startswith('{'))
+        self.assertTrue(text.endswith('}'))
+
+        result = text[1:-1].split(', ')
+        result.sort()
+        sorted_repr_values = [repr(value) for value in self.values]
+        sorted_repr_values.sort()
+        self.assertEqual(result, sorted_repr_values)
+
+    def test_print(self):
+        try:
+            fo = open(support.TESTFN, "w")
+            fo.write(str(self.set))
+            fo.close()
+            fo = open(support.TESTFN, "r")
+            self.assertEqual(fo.read(), repr(self.set))
+        finally:
+            fo.close()
+            support.unlink(support.TESTFN)
+
+    def test_length(self):
+        self.assertEqual(len(self.set), self.length)
+
+    def test_self_equality(self):
+        self.assertEqual(self.set, self.set)
+
+    def test_equivalent_equality(self):
+        self.assertEqual(self.set, self.dup)
+
+    def test_copy(self):
+        self.assertEqual(self.set.copy(), self.dup)
+
+    def test_self_union(self):
+        result = self.set | self.set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_union(self):
+        result = self.set | empty_set
+        self.assertEqual(result, self.dup)
+
+    def test_union_empty(self):
+        result = empty_set | self.set
+        self.assertEqual(result, self.dup)
+
+    def test_self_intersection(self):
+        result = self.set & self.set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_intersection(self):
+        result = self.set & empty_set
+        self.assertEqual(result, empty_set)
+
+    def test_intersection_empty(self):
+        result = empty_set & self.set
+        self.assertEqual(result, empty_set)
+
+    def test_self_isdisjoint(self):
+        result = self.set.isdisjoint(self.set)
+        self.assertEqual(result, not self.set)
+
+    def test_empty_isdisjoint(self):
+        result = self.set.isdisjoint(empty_set)
+        self.assertEqual(result, True)
+
+    def test_isdisjoint_empty(self):
+        result = empty_set.isdisjoint(self.set)
+        self.assertEqual(result, True)
+
+    def test_self_symmetric_difference(self):
+        result = self.set ^ self.set
+        self.assertEqual(result, empty_set)
+
+    def test_empty_symmetric_difference(self):
+        result = self.set ^ empty_set
+        self.assertEqual(result, self.set)
+
+    def test_self_difference(self):
+        result = self.set - self.set
+        self.assertEqual(result, empty_set)
+
+    def test_empty_difference(self):
+        result = self.set - empty_set
+        self.assertEqual(result, self.dup)
+
+    def test_empty_difference_rev(self):
+        result = empty_set - self.set
+        self.assertEqual(result, empty_set)
+
+    def test_iteration(self):
+        for v in self.set:
+            self.assertIn(v, self.values)
+        setiter = iter(self.set)
+        # note: __length_hint__ is an internal undocumented API,
+        # don't rely on it in your own programs
+        self.assertEqual(setiter.__length_hint__(), len(self.set))
+
+    def test_pickling(self):
+        p = pickle.dumps(self.set)
+        copy = pickle.loads(p)
+        self.assertEqual(self.set, copy,
+                         "%s != %s" % (self.set, copy))
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsEmpty(TestBasicOps):
    """TestBasicOps fixture: the empty set."""

    def setUp(self):
        self.case = "empty set"
        self.values = []
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 0
        self.repr = "set()"
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsSingleton(TestBasicOps):
    """TestBasicOps fixture: a one-element set of an int, plus membership checks."""

    def setUp(self):
        self.case = "unit set (number)"
        self.values = [3]
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 1
        self.repr = "{3}"

    def test_in(self):
        self.assertIn(3, self.set)

    def test_not_in(self):
        self.assertNotIn(2, self.set)
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsTuple(TestBasicOps):
    """TestBasicOps fixture: a one-element set holding a tuple."""

    def setUp(self):
        self.case = "unit set (tuple)"
        self.values = [(0, "zero")]
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 1
        self.repr = "{(0, 'zero')}"

    def test_in(self):
        self.assertIn((0, "zero"), self.set)

    def test_not_in(self):
        self.assertNotIn(9, self.set)
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsTriple(TestBasicOps):
    """TestBasicOps fixture: three heterogeneous elements (repr not checked)."""

    def setUp(self):
        self.case = "triple set"
        self.values = [0, "zero", operator.add]
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 3
        self.repr = None
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsString(TestBasicOps):
    """TestBasicOps fixture: three one-character strings; repr is checked
    order-insensitively since set ordering is arbitrary."""

    def setUp(self):
        self.case = "string set"
        self.values = ["a", "b", "c"]
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 3

    def test_repr(self):
        self.check_repr_against_values()
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsBytes(TestBasicOps):
    """TestBasicOps fixture: three one-byte bytes objects; repr is checked
    order-insensitively since set ordering is arbitrary."""

    def setUp(self):
        self.case = "string set"
        self.values = [b"a", b"b", b"c"]
        self.set = set(self.values)
        self.dup = set(self.values)
        self.length = 3

    def test_repr(self):
        self.check_repr_against_values()
+
+#------------------------------------------------------------------------------
+
class TestBasicOpsMixedStringBytes(TestBasicOps):
    # Mixing str and bytes elements can emit BytesWarning (under python -b);
    # the warning filter is entered manually in setUp and exited in tearDown
    # so it stays active for the whole test, not just setUp.
    def setUp(self):
        self._warning_filters = support.check_warnings()
        self._warning_filters.__enter__()
        warnings.simplefilter('ignore', BytesWarning)
        self.case   = "string and bytes set"
        self.values = ["a", "b", b"a", b"b"]
        self.set    = set(self.values)
        self.dup    = set(self.values)
        self.length = 4

    def tearDown(self):
        self._warning_filters.__exit__(None, None, None)

    def test_repr(self):
        self.check_repr_against_values()
+
+#==============================================================================
+
def baditer():
    # Raises before the first yield. The unreachable 'yield' below is
    # load-bearing: it makes this a generator function, so calling it
    # succeeds and the TypeError only surfaces during iteration.
    raise TypeError
    yield True
+
def gooditer():
    # Minimal well-behaved generator: yields a single value.
    yield True
+
+class TestExceptionPropagation(unittest.TestCase):
+    """SF 628246:  Set constructor should not trap iterator TypeErrors"""
+
+    def test_instanceWithException(self):
+        self.assertRaises(TypeError, set, baditer())
+
+    def test_instancesWithoutException(self):
+        # All of these iterables should load without exception.
+        set([1,2,3])
+        set((1,2,3))
+        set({'one':1, 'two':2, 'three':3})
+        set(range(3))
+        set('abc')
+        set(gooditer())
+
+    def test_changingSizeWhileIterating(self):
+        s = set([1,2,3])
+        try:
+            for i in s:
+                s.update([4])
+        except RuntimeError:
+            pass
+        else:
+            self.fail("no exception when changing size during iteration")
+
+#==============================================================================
+
class TestSetOfSets(unittest.TestCase):
    """A frozenset stored inside a set keeps its type and identity through
    pop/add/remove/discard."""

    def test_constructor(self):
        inner = frozenset([1])
        outer = set([inner])
        popped = outer.pop()
        self.assertEqual(type(popped), frozenset)
        # Rebuild the set-of-sets via .add, then verify remove empties it.
        outer.add(inner)
        outer.remove(inner)
        self.assertEqual(outer, set())
        # Discarding a now-missing element must not raise KeyError.
        outer.discard(inner)
+
+#==============================================================================
+
class TestBinaryOps(unittest.TestCase):
    """Binary (non-mutating) set operators |, &, ^ and isdisjoint() on a
    fixed {2, 4, 6} fixture, against subset/superset/overlap/disjoint args."""
    def setUp(self):
        self.set = set((2, 4, 6))

    def test_eq(self):              # SF bug 643115
        # set(dict) takes the keys; values are ignored.
        self.assertEqual(self.set, set({2:1,4:3,6:5}))

    def test_union_subset(self):
        result = self.set | set([2])
        self.assertEqual(result, set((2, 4, 6)))

    def test_union_superset(self):
        result = self.set | set([2, 4, 6, 8])
        self.assertEqual(result, set([2, 4, 6, 8]))

    def test_union_overlap(self):
        result = self.set | set([3, 4, 5])
        self.assertEqual(result, set([2, 3, 4, 5, 6]))

    def test_union_non_overlap(self):
        result = self.set | set([8])
        self.assertEqual(result, set([2, 4, 6, 8]))

    def test_intersection_subset(self):
        result = self.set & set((2, 4))
        self.assertEqual(result, set((2, 4)))

    def test_intersection_superset(self):
        result = self.set & set([2, 4, 6, 8])
        self.assertEqual(result, set([2, 4, 6]))

    def test_intersection_overlap(self):
        result = self.set & set([3, 4, 5])
        self.assertEqual(result, set([4]))

    def test_intersection_non_overlap(self):
        result = self.set & set([8])
        self.assertEqual(result, empty_set)

    def test_isdisjoint_subset(self):
        result = self.set.isdisjoint(set((2, 4)))
        self.assertEqual(result, False)

    def test_isdisjoint_superset(self):
        result = self.set.isdisjoint(set([2, 4, 6, 8]))
        self.assertEqual(result, False)

    def test_isdisjoint_overlap(self):
        result = self.set.isdisjoint(set([3, 4, 5]))
        self.assertEqual(result, False)

    def test_isdisjoint_non_overlap(self):
        result = self.set.isdisjoint(set([8]))
        self.assertEqual(result, True)

    def test_sym_difference_subset(self):
        result = self.set ^ set((2, 4))
        self.assertEqual(result, set([6]))

    def test_sym_difference_superset(self):
        result = self.set ^ set((2, 4, 6, 8))
        self.assertEqual(result, set([8]))

    def test_sym_difference_overlap(self):
        result = self.set ^ set((3, 4, 5))
        self.assertEqual(result, set([2, 3, 5, 6]))

    def test_sym_difference_non_overlap(self):
        result = self.set ^ set([8])
        self.assertEqual(result, set([2, 4, 6, 8]))
+
+#==============================================================================
+
class TestUpdateOps(unittest.TestCase):
    """In-place operators |=, &=, ^=, -= and their method equivalents on a
    fresh {2, 4, 6} fixture, against subset/superset/overlap/disjoint args."""
    def setUp(self):
        self.set = set((2, 4, 6))

    def test_union_subset(self):
        self.set |= set([2])
        self.assertEqual(self.set, set((2, 4, 6)))

    def test_union_superset(self):
        self.set |= set([2, 4, 6, 8])
        self.assertEqual(self.set, set([2, 4, 6, 8]))

    def test_union_overlap(self):
        self.set |= set([3, 4, 5])
        self.assertEqual(self.set, set([2, 3, 4, 5, 6]))

    def test_union_non_overlap(self):
        self.set |= set([8])
        self.assertEqual(self.set, set([2, 4, 6, 8]))

    def test_union_method_call(self):
        self.set.update(set([3, 4, 5]))
        self.assertEqual(self.set, set([2, 3, 4, 5, 6]))

    def test_intersection_subset(self):
        self.set &= set((2, 4))
        self.assertEqual(self.set, set((2, 4)))

    def test_intersection_superset(self):
        self.set &= set([2, 4, 6, 8])
        self.assertEqual(self.set, set([2, 4, 6]))

    def test_intersection_overlap(self):
        self.set &= set([3, 4, 5])
        self.assertEqual(self.set, set([4]))

    def test_intersection_non_overlap(self):
        self.set &= set([8])
        self.assertEqual(self.set, empty_set)

    def test_intersection_method_call(self):
        self.set.intersection_update(set([3, 4, 5]))
        self.assertEqual(self.set, set([4]))

    def test_sym_difference_subset(self):
        self.set ^= set((2, 4))
        self.assertEqual(self.set, set([6]))

    def test_sym_difference_superset(self):
        self.set ^= set((2, 4, 6, 8))
        self.assertEqual(self.set, set([8]))

    def test_sym_difference_overlap(self):
        self.set ^= set((3, 4, 5))
        self.assertEqual(self.set, set([2, 3, 5, 6]))

    def test_sym_difference_non_overlap(self):
        self.set ^= set([8])
        self.assertEqual(self.set, set([2, 4, 6, 8]))

    def test_sym_difference_method_call(self):
        self.set.symmetric_difference_update(set([3, 4, 5]))
        self.assertEqual(self.set, set([2, 3, 5, 6]))

    def test_difference_subset(self):
        self.set -= set((2, 4))
        self.assertEqual(self.set, set([6]))

    def test_difference_superset(self):
        self.set -= set((2, 4, 6, 8))
        self.assertEqual(self.set, set([]))

    def test_difference_overlap(self):
        self.set -= set((3, 4, 5))
        self.assertEqual(self.set, set([2, 6]))

    def test_difference_non_overlap(self):
        self.set -= set([8])
        self.assertEqual(self.set, set([2, 4, 6]))

    def test_difference_method_call(self):
        self.set.difference_update(set([3, 4, 5]))
        self.assertEqual(self.set, set([2, 6]))
+
+#==============================================================================
+
class TestMutate(unittest.TestCase):
    """In-place mutation: add, remove, discard, clear, pop, update."""

    def setUp(self):
        self.values = ["a", "b", "c"]
        self.set = set(self.values)

    def test_add_present(self):
        # Adding an element that is already present is a silent no-op.
        self.set.add("c")
        self.assertEqual(self.set, set("abc"))

    def test_add_absent(self):
        self.set.add("d")
        self.assertEqual(self.set, set("abcd"))

    def test_add_until_full(self):
        # Length grows by exactly one per distinct element added.
        tmp = set()
        expected_len = 0
        for v in self.values:
            tmp.add(v)
            expected_len += 1
            self.assertEqual(len(tmp), expected_len)
        self.assertEqual(tmp, self.set)

    def test_remove_present(self):
        self.set.remove("b")
        self.assertEqual(self.set, set("ac"))

    def test_remove_absent(self):
        # remove() of a missing element must raise (KeyError is a LookupError).
        try:
            self.set.remove("d")
            self.fail("Removing missing element should have raised LookupError")
        except LookupError:
            pass

    def test_remove_until_empty(self):
        # Length shrinks by exactly one per element removed.
        expected_len = len(self.set)
        for v in self.values:
            self.set.remove(v)
            expected_len -= 1
            self.assertEqual(len(self.set), expected_len)

    def test_discard_present(self):
        self.set.discard("c")
        self.assertEqual(self.set, set("ab"))

    def test_discard_absent(self):
        # Unlike remove(), discard() of a missing element is silent.
        self.set.discard("d")
        self.assertEqual(self.set, set("abc"))

    def test_clear(self):
        self.set.clear()
        self.assertEqual(len(self.set), 0)

    def test_pop(self):
        # pop() must eventually return every element exactly once.
        popped = {}
        while self.set:
            popped[self.set.pop()] = None
        self.assertEqual(len(popped), len(self.values))
        for v in self.values:
            self.assertIn(v, popped)

    def test_update_empty_tuple(self):
        self.set.update(())
        self.assertEqual(self.set, set(self.values))

    def test_update_unit_tuple_overlap(self):
        self.set.update(("a",))
        self.assertEqual(self.set, set(self.values))

    def test_update_unit_tuple_non_overlap(self):
        self.set.update(("a", "z"))
        self.assertEqual(self.set, set(self.values + ["z"]))
+
+#==============================================================================
+
class TestSubsets(unittest.TestCase):
    """Subset/superset comparisons.

    Subclasses supply ``left``, ``right``, ``name`` and ``cases`` (the
    comparison operators expected to be true for the pair).
    """

    # Operators that also have a method-name spelling.
    case2method = {"<=": "issubset",
                   ">=": "issuperset",
                  }

    # Each operator paired with its mirror image (operands swapped).
    reverse = {"==": "==",
               "!=": "!=",
               "<":  ">",
               ">":  "<",
               "<=": ">=",
               ">=": "<=",
              }

    def test_issubset(self):
        # Disabled in this mypy sample: the original CPython test built
        # the comparison with eval(), which is not supported here, so
        # everything after the raise is dead code kept for reference.
        raise NotImplementedError() # eval not supported below
        x = self.left
        y = self.right
        for case in "!=", "==", "<", "<=", ">", ">=":
            expected = case in self.cases
            # Test the binary infix spelling.
            result = None ## eval("x" + case + "y", locals())
            self.assertEqual(result, expected)
            # Test the "friendly" method-name spelling, if one exists.
            if case in TestSubsets.case2method:
                method = getattr(x, TestSubsets.case2method[case])
                result = method(y)
                self.assertEqual(result, expected)

            # Now do the same for the operands reversed.
            rcase = TestSubsets.reverse[case]
            result = None ## eval("y" + rcase + "x", locals())
            self.assertEqual(result, expected)
            if rcase in TestSubsets.case2method:
                method = getattr(y, TestSubsets.case2method[rcase])
                result = method(x)
                self.assertEqual(result, expected)
+#------------------------------------------------------------------------------
+
class TestSubsetEqualEmpty(TestSubsets):
    # Two empty sets: equal, each a subset and superset of the other.
    left  = set() # type: Any
    right = set() # type: Any
    name  = "both empty"
    cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
class TestSubsetEqualNonEmpty(TestSubsets):
    # Two equal non-empty sets: same relations as the empty pair.
    left  = set([1, 2])
    right = set([1, 2])
    name  = "equal pair"
    cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
class TestSubsetEmptyNonEmpty(TestSubsets):
    # Empty vs non-empty: strict subset relation holds.
    left  = set() # type: Any
    right = set([1, 2])
    name  = "one empty, one non-empty"
    cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
class TestSubsetPartial(TestSubsets):
    # Proper non-empty subset: strict subset relation holds.
    left  = set([1])
    right = set([1, 2])
    name  = "one a non-empty proper subset of other"
    cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
class TestSubsetNonOverlap(TestSubsets):
    # Disjoint non-empty sets: only inequality holds.
    left  = set([1])
    right = set([2])
    name  = "neither empty, neither contains"
    cases = "!="
+
+#==============================================================================
+
class TestOnlySetsInBinaryOps(unittest.TestCase):
    """Operator spellings require a real set operand; method spellings
    accept any iterable.

    Subclasses supply ``self.set``, ``self.other`` and
    ``self.otherIsIterable``.
    """

    def test_eq_ne(self):
        # Unlike the others, this is testing that == and != *are* allowed.
        self.assertEqual(self.other == self.set, False)
        self.assertEqual(self.set == self.other, False)
        self.assertEqual(self.other != self.set, True)
        self.assertEqual(self.set != self.other, True)

    def test_ge_gt_le_lt(self):
        # Ordering comparisons reject a non-set operand on either side.
        self.assertRaises(TypeError, lambda: self.set < self.other)
        self.assertRaises(TypeError, lambda: self.set <= self.other)
        self.assertRaises(TypeError, lambda: self.set > self.other)
        self.assertRaises(TypeError, lambda: self.set >= self.other)

        self.assertRaises(TypeError, lambda: self.other < self.set)
        self.assertRaises(TypeError, lambda: self.other <= self.set)
        self.assertRaises(TypeError, lambda: self.other > self.set)
        self.assertRaises(TypeError, lambda: self.other >= self.set)

    def test_update_operator(self):
        # |= requires a set even when the operand is iterable.
        try:
            self.set |= self.other
        except TypeError:
            pass
        else:
            self.fail("expected TypeError")

    def test_update(self):
        # The update() method, by contrast, accepts any iterable.
        if self.otherIsIterable:
            self.set.update(self.other)
        else:
            self.assertRaises(TypeError, self.set.update, self.other)

    def test_union(self):
        self.assertRaises(TypeError, lambda: self.set | self.other)
        self.assertRaises(TypeError, lambda: self.other | self.set)
        if self.otherIsIterable:
            self.set.union(self.other)
        else:
            self.assertRaises(TypeError, self.set.union, self.other)

    def test_intersection_update_operator(self):
        try:
            self.set &= self.other
        except TypeError:
            pass
        else:
            self.fail("expected TypeError")

    def test_intersection_update(self):
        if self.otherIsIterable:
            self.set.intersection_update(self.other)
        else:
            self.assertRaises(TypeError,
                              self.set.intersection_update,
                              self.other)

    def test_intersection(self):
        self.assertRaises(TypeError, lambda: self.set & self.other)
        self.assertRaises(TypeError, lambda: self.other & self.set)
        if self.otherIsIterable:
            self.set.intersection(self.other)
        else:
            self.assertRaises(TypeError, self.set.intersection, self.other)

    def test_sym_difference_update_operator(self):
        try:
            self.set ^= self.other
        except TypeError:
            pass
        else:
            self.fail("expected TypeError")

    def test_sym_difference_update(self):
        if self.otherIsIterable:
            self.set.symmetric_difference_update(self.other)
        else:
            self.assertRaises(TypeError,
                              self.set.symmetric_difference_update,
                              self.other)

    def test_sym_difference(self):
        self.assertRaises(TypeError, lambda: self.set ^ self.other)
        self.assertRaises(TypeError, lambda: self.other ^ self.set)
        if self.otherIsIterable:
            self.set.symmetric_difference(self.other)
        else:
            self.assertRaises(TypeError, self.set.symmetric_difference, self.other)

    def test_difference_update_operator(self):
        try:
            self.set -= self.other
        except TypeError:
            pass
        else:
            self.fail("expected TypeError")

    def test_difference_update(self):
        if self.otherIsIterable:
            self.set.difference_update(self.other)
        else:
            self.assertRaises(TypeError,
                              self.set.difference_update,
                              self.other)

    def test_difference(self):
        self.assertRaises(TypeError, lambda: self.set - self.other)
        self.assertRaises(TypeError, lambda: self.other - self.set)
        if self.otherIsIterable:
            self.set.difference(self.other)
        else:
            self.assertRaises(TypeError, self.set.difference, self.other)
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsNumeric(TestOnlySetsInBinaryOps):
    def setUp(self):
        # An int is neither a set nor iterable.
        self.set   = set((1, 2, 3))
        self.other = 19
        self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsDict(TestOnlySetsInBinaryOps):
    def setUp(self):
        # A dict is iterable (over its keys) but is not a set.
        self.set   = set((1, 2, 3))
        self.other = {1:2, 3:4}
        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsOperator(TestOnlySetsInBinaryOps):
    def setUp(self):
        # A function object is neither a set nor iterable.
        self.set   = set((1, 2, 3))
        self.other = operator.add
        self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsTuple(TestOnlySetsInBinaryOps):
    def setUp(self):
        # A tuple is iterable but is not a set.
        self.set   = set((1, 2, 3))
        self.other = (2, 4, 6)
        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsString(TestOnlySetsInBinaryOps):
    def setUp(self):
        # A string is iterable (over characters) but is not a set.
        self.set   = set((1, 2, 3))
        self.other = 'abc'
        self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
class TestOnlySetsGenerator(TestOnlySetsInBinaryOps):
    def setUp(self):
        # A generator is iterable but is not a set.
        def gen():
            for i in range(0, 10, 2):
                yield i
        self.set   = set((1, 2, 3))
        self.other = gen()
        self.otherIsIterable = True
+
+#==============================================================================
+
+class TestCopying(unittest.TestCase):
+
+    def test_copy(self):
+        dup = self.set.copy()
+        dup_list = sorted(dup, key=repr)
+        set_list = sorted(self.set, key=repr)
+        self.assertEqual(len(dup_list), len(set_list))
+        for i in range(len(dup_list)):
+            self.assertTrue(dup_list[i] is set_list[i])
+
+    def test_deep_copy(self):
+        dup = copy.deepcopy(self.set)
+        ##print type(dup), repr(dup)
+        dup_list = sorted(dup, key=repr)
+        set_list = sorted(self.set, key=repr)
+        self.assertEqual(len(dup_list), len(set_list))
+        for i in range(len(dup_list)):
+            self.assertEqual(dup_list[i], set_list[i])
+
+#------------------------------------------------------------------------------
+
class TestCopyingEmpty(TestCopying):
    # Copying an empty set.
    def setUp(self):
        self.set = set()
+
+#------------------------------------------------------------------------------
+
class TestCopyingSingleton(TestCopying):
    # Copying a one-element set.
    def setUp(self):
        self.set = set(["hello"])
+
+#------------------------------------------------------------------------------
+
class TestCopyingTriple(TestCopying):
    # Copying a set with mixed element types (str, int, None).
    def setUp(self):
        self.set = set(["zero", 0, None])
+
+#------------------------------------------------------------------------------
+
class TestCopyingTuple(TestCopying):
    # Copying a set whose element is a tuple (exercises deep copy of
    # container elements).
    def setUp(self):
        self.set = set([(1, 2)])
+
+#------------------------------------------------------------------------------
+
class TestCopyingNested(TestCopying):
    # Copying a set whose element is a nested tuple.
    def setUp(self):
        self.set = set([((1, 2), (3, 4))])
+
+#==============================================================================
+
class TestIdentities(unittest.TestCase):
    """Algebraic identities that must hold for a pair of sets."""

    def setUp(self):
        # Two overlapping, unequal sets of characters.
        self.a = set('abracadabra')
        self.b = set('alacazam')

    def test_binopsVsSubsets(self):
        left, right = self.a, self.b
        # Differences and intersections are proper subsets of the
        # operands; unions are proper supersets (operands overlap
        # but neither contains the other).
        self.assertTrue(left - right < left)
        self.assertTrue(right - left < right)
        self.assertTrue(left & right < left)
        self.assertTrue(left & right < right)
        self.assertTrue(left | right > left)
        self.assertTrue(left | right > right)
        self.assertTrue(left ^ right < left | right)

    def test_commutativity(self):
        left, right = self.a, self.b
        # &, | and ^ commute; - does not for unequal operands.
        self.assertEqual(left & right, right & left)
        self.assertEqual(left | right, right | left)
        self.assertEqual(left ^ right, right ^ left)
        if left != right:
            self.assertNotEqual(left - right, right - left)

    def test_summations(self):
        # check that sums of parts equal the whole
        left, right = self.a, self.b
        self.assertEqual((left - right) | (left & right) | (right - left), left | right)
        self.assertEqual((left & right) | (left ^ right), left | right)
        self.assertEqual(left | (right - left), left | right)
        self.assertEqual((left - right) | right, left | right)
        self.assertEqual((left - right) | (left & right), left)
        self.assertEqual((right - left) | (left & right), right)
        self.assertEqual((left - right) | (right - left), left ^ right)

    def test_exclusion(self):
        # check that inverse operations show non-overlap
        left, right, nothing = self.a, self.b, set()
        self.assertEqual((left - right) & right, nothing)
        self.assertEqual((right - left) & left, nothing)
        self.assertEqual((left & right) & (left ^ right), nothing)
+
+# Tests derived from test_itertools.py =======================================
+
def R(seqn):
    'Regular generator'
    for item in seqn:
        yield item
+
class G:
    'Sequence using __getitem__'

    def __init__(self, seqn):
        # Indexing is delegated to the wrapped sequence; the old-style
        # iteration protocol (__getitem__ from 0 upward) makes this iterable.
        self.seqn = seqn

    def __getitem__(self, i):
        return self.seqn[i]
+
class I:
    'Sequence using iterator protocol'

    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0  # cursor into seqn

    def __iter__(self):
        return self

    def __next__(self):
        if self.i >= len(self.seqn):
            raise StopIteration
        value = self.seqn[self.i]
        self.i += 1
        return value
+
class Ig:
    'Sequence using iterator protocol defined with a generator'

    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0  # kept for parity with class I; __iter__ does not use it

    def __iter__(self):
        for item in self.seqn:
            yield item
+
class X:
    'Missing __getitem__ and __iter__'
    # Defines only __next__, so the object is NOT iterable: passing it
    # to a set constructor or method must raise TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __next__(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v
+
class N:
    'Iterator missing __next__()'
    # __iter__ returns self but there is no __next__, so consuming the
    # "iterator" raises TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
+
class E:
    'Test propagation of exceptions'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def __next__(self):
        # Deliberate ZeroDivisionError: consumers must let it propagate.
        3 // 0
+
class S:
    'Test immediate stop'
    def __init__(self, seqn):
        # The argument is accepted only for interface parity; it is ignored.
        pass
    def __iter__(self):
        return self
    def __next__(self):
        raise StopIteration
+
from itertools import chain
def L(seqn):
    'Test multiple tiers of iterators'
    # Wrap seqn in G -> Ig -> R, then pass the result through map()
    # and chain() so several iterator layers are stacked.
    return chain(map(lambda x:x, R(Ig(G(seqn)))))
+
class TestVariousIteratorArgs(unittest.TestCase):
    """Feed set/frozenset constructors and methods many iterable flavors.

    The helper classes G, I, Ig, S, L, R (defined above) wrap a sequence
    in different implementations of the iteration protocol; X and N are
    broken iterables, and E raises from __next__.
    """

    def test_constructor(self):
        for cons in (set, frozenset):
            for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
                # Every well-behaved iterable wrapper must yield the
                # same resulting set.
                for g in (G, I, Ig, S, L, R):
                    self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr))
                # Non-iterables are rejected; iterator exceptions propagate.
                self.assertRaises(TypeError, cons, X(s))
                self.assertRaises(TypeError, cons, N(s))
                self.assertRaises(ZeroDivisionError, cons, E(s))

    def test_inline_methods(self):
        s = set('november')
        for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
            for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint):
                for g in (G, I, Ig, L, R):
                    expected = meth(data)
                    # BUG FIX: was meth(G(data)), which ignored the loop
                    # variable g and exercised only the G wrapper
                    # (upstream CPython uses g(data) here).
                    actual = meth(g(data))
                    if isinstance(expected, bool):
                        self.assertEqual(actual, expected)
                    else:
                        self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr))
                self.assertRaises(TypeError, meth, X(s))
                self.assertRaises(TypeError, meth, N(s))
                self.assertRaises(ZeroDivisionError, meth, E(s))

    def test_inplace_methods(self):
        for data in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5), 'december'):
            for methname in ('update', 'intersection_update',
                             'difference_update', 'symmetric_difference_update'):
                # Updating from a wrapped iterable must match updating
                # from the equivalent list.
                for g in (G, I, Ig, S, L, R):
                    s = set('january')
                    t = s.copy()
                    getattr(s, methname)(list(g(data)))
                    getattr(t, methname)(g(data))
                    self.assertEqual(sorted(s, key=repr), sorted(t, key=repr))

                self.assertRaises(TypeError, getattr(set('january'), methname), X(data))
                self.assertRaises(TypeError, getattr(set('january'), methname), N(data))
                self.assertRaises(ZeroDivisionError, getattr(set('january'), methname), E(data))
+
# Globals toggled by TestWeirdBugs so bad_eq/bad_dict_clear misbehave on demand.
be_bad = set2 = dict2 = None  # type: Any
+
class bad_eq:
    # While the global be_bad flag is set, __eq__ clears the global set2
    # and raises, simulating an element that mutates a set mid-operation.
    def __eq__(self, other):
        if be_bad:
            set2.clear()
            raise ZeroDivisionError
        return self is other
    def __hash__(self):
        return 0
+
class bad_dict_clear:
    # While the global be_bad flag is set, __eq__ clears the global dict2
    # as a side effect (but does not raise).
    def __eq__(self, other):
        if be_bad:
            dict2.clear()
        return self is other
    def __hash__(self):
        return 0
+
class TestWeirdBugs(unittest.TestCase):
    def test_8420_set_merge(self):
        # This used to segfault
        # Regression test: mutating the other operand's container from
        # inside __eq__ during a set merge must not crash the interpreter.
        global be_bad, set2, dict2
        be_bad = False
        set1 = {bad_eq()}
        set2 = {bad_eq() for i in range(75)}
        be_bad = True
        self.assertRaises(ZeroDivisionError, set1.update, set2)

        be_bad = False
        set1 = {bad_dict_clear()}
        dict2 = {bad_dict_clear(): None}
        be_bad = True
        set1.symmetric_difference_update(dict2)
+
# Application tests (based on David Eppstein's graph recipes) ===================================
+
def powerset(U):
    """Generate every subset of the iterable U as a frozenset.

    Recursive: every subset of the tail is yielded both without and
    with the first element.
    """
    items = iter(U)
    try:
        first = frozenset([next(items)])
    except StopIteration:
        # Base case: the power set of the empty set is {frozenset()}.
        yield frozenset()
        return
    for subset in powerset(items):
        yield subset
        yield subset | first
+
def cube(n):
    """Graph of n-dimensional hypercube.

    Vertices are the subsets of range(n); two vertices are adjacent iff
    they differ in exactly one coordinate (symmetric difference with a
    single-axis frozenset).
    """
    axes = [frozenset([d]) for d in range(n)]
    graph = {}
    for vertex in powerset(range(n)):
        graph[vertex] = frozenset(vertex ^ axis for axis in axes)
    return graph
+
def linegraph(G):
    """Graph, the vertices of which are edges of G,
    with two vertices being adjacent iff the corresponding
    edges share a vertex."""
    result = {}
    for u in G:
        for v in G[u]:
            # Edges incident to u or v, excluding the edge {u, v} itself.
            incident = [frozenset([u, w]) for w in G[u] if w != v]
            incident.extend(frozenset([v, w]) for w in G[v] if w != u)
            result[frozenset([u, v])] = frozenset(incident)
    return result
+
def faces(G):
    'Return a set of faces in G.  Where a face is a set of vertices on that face'
    # currently limited to triangles,squares, and pentagons
    f = set()
    for v1, edges in G.items():
        for v2 in edges:
            for v3 in G[v2]:
                if v1 == v3:
                    continue
                # Triangle: path v1-v2-v3 closes back to v1.
                if v1 in G[v3]:
                    f.add(frozenset([v1, v2, v3]))
                else:
                    for v4 in G[v3]:
                        if v4 == v2:
                            continue
                        # Square: path v1-v2-v3-v4 closes back to v1.
                        if v1 in G[v4]:
                            f.add(frozenset([v1, v2, v3, v4]))
                        else:
                            for v5 in G[v4]:
                                if v5 == v3 or v5 == v2:
                                    continue
                                # Pentagon: path v1-v2-v3-v4-v5 closes back to v1.
                                if v1 in G[v5]:
                                    f.add(frozenset([v1, v2, v3, v4, v5]))
    return f
+
+
class TestGraphs(unittest.TestCase):
    """Exercise sets/frozensets through the graph helpers above."""

    def test_cube(self):

        g = cube(3)                             # vert --> {v1, v2, v3}
        vertices1 = set(g)
        self.assertEqual(len(vertices1), 8)     # eight vertices
        for edge in g.values():
            self.assertEqual(len(edge), 3)      # each vertex connects to three edges
        vertices2 = set()
        for edges in g.values():
            for v in edges:
                vertices2.add(v)
        self.assertEqual(vertices1, vertices2)  # edge vertices in original set

        cubefaces = faces(g)
        self.assertEqual(len(cubefaces), 6)     # six faces
        for face in cubefaces:
            self.assertEqual(len(face), 4)      # each face is a square

    def test_cuboctahedron(self):

        # http://en.wikipedia.org/wiki/Cuboctahedron
        # 8 triangular faces and 6 square faces
        # 12 identical vertices each connecting a triangle and square
        g = cube(3)
        cuboctahedron = linegraph(g)            # V( --> {V1, V2, V3, V4}
        self.assertEqual(len(cuboctahedron), 12)# twelve vertices

        vertices = set(cuboctahedron)
        for edges in cuboctahedron.values():
            self.assertEqual(len(edges), 4)     # each vertex connects to four other vertices
        othervertices = set(edge for edges in cuboctahedron.values() for edge in edges)
        self.assertEqual(vertices, othervertices)   # edge vertices in original set

        cubofaces = faces(cuboctahedron)
        facesizes = collections.defaultdict(int)
        for face in cubofaces:
            facesizes[len(face)] += 1
        self.assertEqual(facesizes[3], 8)       # eight triangular faces
        self.assertEqual(facesizes[4], 6)       # six square faces

        for vertex in cuboctahedron:
            edge = vertex                       # Cuboctahedron vertices are edges in Cube
            self.assertEqual(len(edge), 2)      # Two cube vertices define an edge
            for cubevert in edge:
                self.assertIn(cubevert, g)
+
+
+#==============================================================================
+
def test_main(verbose=None):
    """Run every test class in this module via test.support.

    With verbose set, on a build that exposes sys.gettotalrefcount
    (CPython debug builds), run the suite five more times and print the
    total refcounts to surface reference leaks.
    """
    test_classes = (
        TestSet,
        TestSetSubclass,
        TestSetSubclassWithKeywordArgs,
        TestFrozenSet,
        TestFrozenSetSubclass,
        TestSetOfSets,
        TestExceptionPropagation,
        TestBasicOpsEmpty,
        TestBasicOpsSingleton,
        TestBasicOpsTuple,
        TestBasicOpsTriple,
        TestBasicOpsString,
        TestBasicOpsBytes,
        TestBasicOpsMixedStringBytes,
        TestBinaryOps,
        TestUpdateOps,
        TestMutate,
        TestSubsetEqualEmpty,
        TestSubsetEqualNonEmpty,
        TestSubsetEmptyNonEmpty,
        TestSubsetPartial,
        TestSubsetNonOverlap,
        TestOnlySetsNumeric,
        TestOnlySetsDict,
        TestOnlySetsOperator,
        TestOnlySetsTuple,
        TestOnlySetsString,
        TestOnlySetsGenerator,
        TestCopyingEmpty,
        TestCopyingSingleton,
        TestCopyingTriple,
        TestCopyingTuple,
        TestCopyingNested,
        TestIdentities,
        TestVariousIteratorArgs,
        TestGraphs,
        TestWeirdBugs,
        )

    support.run_unittest(*test_classes)

    # verify reference counting
    if verbose and hasattr(sys, "gettotalrefcount"):
        import gc
        counts = [None] * 5
        for i in range(len(counts)):
            support.run_unittest(*test_classes)
            gc.collect()
            counts[i] = sys.gettotalrefcount()
        print(counts)

if __name__ == "__main__":
    test_main(verbose=True)
diff --git a/test-data/stdlib-samples/3.2/test/test_shutil.py b/test-data/stdlib-samples/3.2/test/test_shutil.py
new file mode 100644
index 0000000..32e0fd1
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_shutil.py
@@ -0,0 +1,978 @@
+# Copyright (C) 2003 Python Software Foundation
+
+import unittest
+import shutil
+import tempfile
+import sys
+import stat
+import os
+import os.path
+import functools
+from test import support
+from test.support import TESTFN
+from os.path import splitdrive
+from distutils.spawn import find_executable, spawn
+from shutil import (_make_tarball, _make_zipfile, make_archive,
+                    register_archive_format, unregister_archive_format,
+                    get_archive_formats, Error, unpack_archive,
+                    register_unpack_format, RegistryError,
+                    unregister_unpack_format, get_unpack_formats)
+import tarfile
+import warnings
+
+from test import support
+from test.support import check_warnings, captured_stdout
+
+from typing import (
+    Any, Callable, Tuple, List, Sequence, BinaryIO, IO, Union, cast
+)
+from types import TracebackType
+
+import bz2
+BZ2_SUPPORTED = True
+
+TESTFN2 = TESTFN + "2"
+
+import grp
+import pwd
+UID_GID_SUPPORT = True
+
+import zlib
+
+import zipfile
+ZIP_SUPPORT = True
+
+def _fake_rename(*args: Any, **kwargs: Any) -> None:
+    # Pretend the destination path is on a different filesystem.
+    raise OSError()
+
+def mock_rename(func: Any) -> Any:
+    """Decorator: run *func* with shutil.rename patched to always fail,
+    forcing shutil.move() down its cross-filesystem copy+delete path."""
+    @functools.wraps(func)
+    def wrap(*args: Any, **kwargs: Any) -> Any:
+        try:
+            builtin_rename = shutil.rename
+            shutil.rename = cast(Any, _fake_rename)
+            return func(*args, **kwargs)
+        finally:
+            # Always restore the real rename, even if func raised.
+            shutil.rename = cast(Any, builtin_rename)
+    return wrap
+
+class TestShutil(unittest.TestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        # Directories registered here (via self.mkdtemp) are removed in
+        # tearDown().
+        self.tempdirs = []  # type: List[str]
+
+    def tearDown(self) -> None:
+        super().tearDown()
+        while self.tempdirs:
+            d = self.tempdirs.pop()
+            # On Windows/Cygwin removal can fail on open handles, so
+            # ignore_errors is enabled there.
+            shutil.rmtree(d, os.name in ('nt', 'cygwin'))
+
+    def write_file(self, path: Union[str, List[str], tuple], content: str = 'xxx') -> None:
+        """Writes a file in the given path.
+
+
+        path can be a string or a sequence.
+        """
+        if isinstance(path, list):
+            path = os.path.join(*path)
+        elif isinstance(path, tuple):
+            path = cast(str, os.path.join(*path))
+        f = open(path, 'w')
+        try:
+            f.write(content)
+        finally:
+            f.close()
+
+    def mkdtemp(self) -> str:
+        """Create a temporary directory that will be cleaned up.
+
+        Returns the path of the directory.
+        """
+        d = tempfile.mkdtemp()
+        self.tempdirs.append(d)
+        return d
+
+    def test_rmtree_errors(self) -> None:
+        # filename is guaranteed not to exist
+        filename = tempfile.mktemp()
+        self.assertRaises(OSError, shutil.rmtree, filename)
+
+    # See bug #1071513 for why we don't run this on cygwin
+    # and bug #1076467 for why we don't run this as root.
+    # (Root bypasses permission checks, so the chmod below would not make
+    # anything unremovable and onerror would never fire.)
+    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
+        and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
+        def test_on_error(self) -> None:
+            # errorState tracks progress through check_args_to_onerror:
+            # 0 = not called yet, 1 = file/listdir failure seen,
+            # 2 = rmdir failure seen.
+            self.errorState = 0
+            os.mkdir(TESTFN)
+            self.childpath = os.path.join(TESTFN, 'a')
+            f = open(self.childpath, 'w')
+            f.close()
+            old_dir_mode = os.stat(TESTFN).st_mode
+            old_child_mode = os.stat(self.childpath).st_mode
+            # Make unwritable.
+            os.chmod(self.childpath, stat.S_IREAD)
+            os.chmod(TESTFN, stat.S_IREAD)
+
+            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
+            # Test whether onerror has actually been called.
+            self.assertEqual(self.errorState, 2,
+                             "Expected call to onerror function did not happen.")
+
+            # Make writable again.
+            os.chmod(TESTFN, old_dir_mode)
+            os.chmod(self.childpath, old_child_mode)
+
+            # Clean up.
+            shutil.rmtree(TESTFN)
+
+    def check_args_to_onerror(self, func: Callable[[str], Any], arg: str,
+                              exc: Tuple[type, BaseException,
+                                         TracebackType]) -> None:
+        # onerror callback for test_on_error: validates the (func, path,
+        # exc_info) arguments rmtree passes and advances self.errorState.
+        # test_rmtree_errors deliberately runs rmtree
+        # on a directory that is chmod 400, which will fail.
+        # This function is run when shutil.rmtree fails.
+        # 99.9% of the time it initially fails to remove
+        # a file in the directory, so the first time through
+        # func is os.remove.
+        # However, some Linux machines running ZFS on
+        # FUSE experienced a failure earlier in the process
+        # at os.listdir.  The first failure may legally
+        # be either.
+        if self.errorState == 0:
+            if func is os.remove:
+                self.assertEqual(arg, self.childpath)
+            else:
+                self.assertIs(func, os.listdir,
+                              "func must be either os.remove or os.listdir")
+                self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 1
+        else:
+            self.assertEqual(func, os.rmdir)
+            self.assertEqual(arg, TESTFN)
+            self.assertTrue(issubclass(exc[0], OSError))
+            self.errorState = 2
+
+    def test_rmtree_dont_delete_file(self) -> None:
+        # When called on a file instead of a directory, don't delete it.
+        handle, path = tempfile.mkstemp()
+        os.fdopen(handle).close()
+        self.assertRaises(OSError, shutil.rmtree, path)
+        os.remove(path)
+
+    def _write_data(self, path: str, data: str) -> None:
+        # Small helper: write *data* to a fresh text file at *path*.
+        f = open(path, "w")
+        f.write(data)
+        f.close()
+
+    def test_copytree_simple(self) -> None:
+        """copytree() should reproduce files, subdirs and their contents."""
+
+        def read_data(path: str) -> str:
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        src_dir = tempfile.mkdtemp()
+        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        try:
+            shutil.copytree(src_dir, dst_dir)
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
+            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
+            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
+                                                        'test.txt')))
+            actual = read_data(os.path.join(dst_dir, 'test.txt'))
+            self.assertEqual(actual, '123')
+            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
+            self.assertEqual(actual, '456')
+        finally:
+            # Manual cleanup: these dirs were made with tempfile.mkdtemp()
+            # directly, not self.mkdtemp(), so tearDown won't remove them.
+            for path in (
+                    os.path.join(src_dir, 'test.txt'),
+                    os.path.join(dst_dir, 'test.txt'),
+                    os.path.join(src_dir, 'test_dir', 'test.txt'),
+                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
+                ):
+                if os.path.exists(path):
+                    os.remove(path)
+            for path in (src_dir,
+                    os.path.dirname(dst_dir)
+                ):
+                if os.path.exists(path):
+                    shutil.rmtree(path)
+
+    def test_copytree_with_exclude(self) -> None:
+        """copytree() must honour both glob-pattern and callable ignores."""
+
+        def read_data(path: str) -> str:
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        # creating data
+        join = os.path.join
+        exists = os.path.exists
+        src_dir = tempfile.mkdtemp()
+        try:
+            dst_dir = join(tempfile.mkdtemp(), 'destination')
+            self._write_data(join(src_dir, 'test.txt'), '123')
+            self._write_data(join(src_dir, 'test.tmp'), '123')
+            os.mkdir(join(src_dir, 'test_dir'))
+            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
+                             '456')
+            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
+                             '456')
+
+
+            # testing glob-like patterns
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(exists(join(dst_dir, 'test.txt')))
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+
+            # testing callable-style
+            try:
+                def _filter(src: str, names: Sequence[str]) -> List[str]:
+                    # Returns the names to *ignore*: any dir named 'subdir'
+                    # and any path whose extension matches.
+                    res = []  # type: List[str]
+                    for name in names:
+                        path = os.path.join(src, name)
+
+                        if (os.path.isdir(path) and
+                            path.split()[-1] == 'subdir'):
+                            res.append(name)
+                        elif os.path.splitext(path)[-1] in ('.py'):
+                            res.append(name)
+                    return res
+
+                shutil.copytree(src_dir, dst_dir, ignore=_filter)
+
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
+                                        'test.py')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+        finally:
+            shutil.rmtree(src_dir)
+            shutil.rmtree(os.path.dirname(dst_dir))
+
+    @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
+    def test_dont_copy_file_onto_link_to_itself(self) -> None:
+        """copyfile(src, dst) must refuse when dst is a hard link to src."""
+        # Temporarily disable test on Windows.
+        if os.name == 'nt':
+            return
+        # bug 851123.
+        os.mkdir(TESTFN)
+        src = os.path.join(TESTFN, 'cheese')
+        dst = os.path.join(TESTFN, 'shop')
+        try:
+            with open(src, 'w') as f:
+                f.write('cheddar')
+            os.link(src, dst)
+            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+            # The source must be left untouched by the failed copy.
+            with open(src, 'r') as f:
+                self.assertEqual(f.read(), 'cheddar')
+            os.remove(dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    @support.skip_unless_symlink
+    def test_dont_copy_file_onto_symlink_to_itself(self) -> None:
+        """copyfile(src, dst) must refuse when dst is a symlink to src."""
+        # bug 851123.
+        os.mkdir(TESTFN)
+        src = os.path.join(TESTFN, 'cheese')
+        dst = os.path.join(TESTFN, 'shop')
+        try:
+            with open(src, 'w') as f:
+                f.write('cheddar')
+            # Using `src` here would mean we end up with a symlink pointing
+            # to TESTFN/TESTFN/cheese, while it should point at
+            # TESTFN/cheese.
+            os.symlink('cheese', dst)
+            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
+            with open(src, 'r') as f:
+                self.assertEqual(f.read(), 'cheddar')
+            os.remove(dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    @support.skip_unless_symlink
+    def test_rmtree_on_symlink(self) -> None:
+        """rmtree() on a symlink to a directory must raise, not recurse."""
+        # bug 1669.
+        os.mkdir(TESTFN)
+        try:
+            src = os.path.join(TESTFN, 'cheese')
+            dst = os.path.join(TESTFN, 'shop')
+            os.mkdir(src)
+            os.symlink(src, dst)
+            self.assertRaises(OSError, shutil.rmtree, dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    # These two tests are only defined on platforms with os.mkfifo.
+    if hasattr(os, "mkfifo"):
+        # Issue #3002: copyfile and copytree block indefinitely on named pipes
+        def test_copyfile_named_pipe(self) -> None:
+            os.mkfifo(TESTFN)
+            try:
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, TESTFN, TESTFN2)
+                self.assertRaises(shutil.SpecialFileError,
+                                  shutil.copyfile, __file__, TESTFN)
+            finally:
+                os.remove(TESTFN)
+
+        @support.skip_unless_symlink
+        def test_copytree_named_pipe(self) -> None:
+            os.mkdir(TESTFN)
+            try:
+                subdir = os.path.join(TESTFN, "subdir")
+                os.mkdir(subdir)
+                pipe = os.path.join(subdir, "mypipe")
+                os.mkfifo(pipe)
+                try:
+                    shutil.copytree(TESTFN, TESTFN2)
+                except shutil.Error as e:
+                    # copytree collects per-file failures into Error.args[0].
+                    errors = e.args[0]
+                    self.assertEqual(len(errors), 1)
+                    src, dst, error_msg = errors[0]
+                    self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
+                else:
+                    self.fail("shutil.Error should have been raised")
+            finally:
+                shutil.rmtree(TESTFN, ignore_errors=True)
+                shutil.rmtree(TESTFN2, ignore_errors=True)
+
+    def test_copytree_special_func(self) -> None:
+        """copytree() must route every file copy through copy_function."""
+
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        copied = []  # type: List[Tuple[str, str]]
+        def _copy(src: str, dst: str) -> None:
+            copied.append((src, dst))
+
+        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
+        # Two files were created above, so two copy calls are expected.
+        self.assertEqual(len(copied), 2)
+
+    @support.skip_unless_symlink
+    def test_copytree_dangling_symlinks(self) -> None:
+
+        # a dangling symlink raises an error at the end
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
+
+        # a dangling symlink is ignored with the proper flag
+        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
+        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
+        self.assertNotIn('test.txt', os.listdir(dst_dir))
+
+        # a dangling symlink is copied if symlinks=True
+        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
+        shutil.copytree(src_dir, dst_dir, symlinks=True)
+        self.assertIn('test.txt', os.listdir(dst_dir))
+
+    def _copy_file(self,
+                   method: Callable[[str, str], None]) -> Tuple[str, str]:
+        # Helper: copy a fresh temp file into a second temp dir using
+        # *method* and return (source_path, destination_path).
+        fname = 'test.txt'
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, fname])
+        file1 = os.path.join(tmpdir, fname)
+        tmpdir2 = self.mkdtemp()
+        method(file1, tmpdir2)
+        file2 = os.path.join(tmpdir2, fname)
+        return (file1, file2)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    def test_copy(self) -> None:
+        # Ensure that the copied file exists and has the same mode bits.
+        file1, file2 = self._copy_file(shutil.copy)
+        self.assertTrue(os.path.exists(file2))
+        self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
+    def test_copy2(self) -> None:
+        # Ensure that the copied file exists and has the same mode and
+        # modification time bits.
+        file1, file2 = self._copy_file(shutil.copy2)
+        self.assertTrue(os.path.exists(file2))
+        file1_stat = os.stat(file1)
+        file2_stat = os.stat(file2)
+        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
+        for attr in 'st_atime', 'st_mtime':
+            # The modification times may be truncated in the new file.
+            self.assertLessEqual(getattr(file1_stat, attr),
+                                 getattr(file2_stat, attr) + 1)
+        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
+            self.assertEqual(getattr(file1_stat, 'st_flags'),
+                             getattr(file2_stat, 'st_flags'))
+
+    @unittest.skipUnless(zlib, "requires zlib")
+    def test_make_tarball(self) -> None:
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+        os.mkdir(os.path.join(tmpdir, 'sub'))
+        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
+        unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
+                            "source and target should be on same drive")
+
+        base_name = os.path.join(tmpdir2, 'archive')
+
+        # working with relative paths to avoid tar warnings
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    def _tarinfo(self, path: str) -> tuple:
+        """Return the sorted member names of the tar archive at *path*."""
+        tar = tarfile.open(path)
+        try:
+            names = tar.getnames()
+            names.sort()
+            return tuple(names)
+        finally:
+            tar.close()
+
+    def _create_files(self) -> Tuple[str, str, str]:
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        dist = os.path.join(tmpdir, 'dist')
+        os.mkdir(dist)
+        self.write_file([dist, 'file1'], 'xxx')
+        self.write_file([dist, 'file2'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub'))
+        self.write_file([dist, 'sub', 'file3'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub2'))
+        tmpdir2 = self.mkdtemp()
+        base_name = os.path.join(tmpdir2, 'archive')
+        # Returns (dir containing 'dist' tree, second temp dir,
+        # archive base name inside the second dir).
+        return tmpdir, tmpdir2, base_name
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
+                         'Need the tar command to run')
+    def test_tarfile_vs_tar(self) -> None:
+        """_make_tarball() output should match the system `tar` command's."""
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now create another tarball using `tar`
+        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
+        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
+        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            with captured_stdout() as s:
+                spawn(tar_cmd)
+                spawn(gzip_cmd)
+        finally:
+            os.chdir(old_dir)
+
+        self.assertTrue(os.path.exists(tarball2))
+        # let's compare both tarballs
+        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now for a dry_run
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
+        finally:
+            os.chdir(old_dir)
+        # NOTE(review): this asserts the .tar left over from the previous,
+        # non-dry run still exists; it does not prove dry_run wrote nothing.
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
+    def test_make_zipfile(self) -> None:
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
+        base_name = os.path.join(tmpdir2, 'archive')
+        _make_zipfile(base_name, tmpdir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.zip'
+        self.assertTrue(os.path.exists(tarball))
+
+
+    def test_make_archive(self) -> None:
+        # An unknown format name must raise ValueError.
+        tmpdir = self.mkdtemp()
+        base_name = os.path.join(tmpdir, 'archive')
+        self.assertRaises(ValueError, make_archive, base_name, 'xxx')
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_make_archive_owner_group(self) -> None:
+        # testing make_archive with owner and group, with various combinations
+        # this works even if there's not gid/uid support
+        if UID_GID_SUPPORT:
+            group = grp.getgrgid(0).gr_name
+            owner = pwd.getpwuid(0).pw_name
+        else:
+            group = owner = 'root'
+
+        base_dir, root_dir, base_name =  self._create_files()
+        base_name = os.path.join(self.mkdtemp() , 'archive')
+        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
+                           group=group)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'zip', root_dir, base_dir)
+        self.assertTrue(os.path.exists(res))
+
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner=owner, group=group)
+        self.assertTrue(os.path.exists(res))
+
+        # Unknown owner/group names must not prevent archive creation.
+        res = make_archive(base_name, 'tar', root_dir, base_dir,
+                           owner='kjhkjhkjg', group='oihohoh')
+        self.assertTrue(os.path.exists(res))
+
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
+    def test_tarfile_root_owner(self) -> None:
+        """owner/group names passed to _make_tarball must become uid/gid 0."""
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        group = grp.getgrgid(0).gr_name
+        owner = pwd.getpwuid(0).pw_name
+        try:
+            archive_name = _make_tarball(base_name, 'dist', compress=None,
+                                         owner=owner, group=group)
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        self.assertTrue(os.path.exists(archive_name))
+
+        # now checks the rights
+        archive = tarfile.open(archive_name)
+        try:
+            for member in archive.getmembers():
+                self.assertEqual(member.uid, 0)
+                self.assertEqual(member.gid, 0)
+        finally:
+            archive.close()
+
+    def test_make_archive_cwd(self) -> None:
+        # make_archive() must restore the cwd even when the archiver raises.
+        current_dir = os.getcwd()
+        def _breaks(*args: Any, **kw: Any) -> None:
+            raise RuntimeError()
+
+        register_archive_format('xxx', _breaks, [], 'xxx file')
+        try:
+            try:
+                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
+            except Exception:
+                pass
+            self.assertEqual(os.getcwd(), current_dir)
+        finally:
+            unregister_archive_format('xxx')
+
+    def test_register_archive_format(self) -> None:
+        # Invalid registrations (non-callable, bad extra-args shape)
+        # must raise TypeError; valid ones must round-trip through
+        # get_archive_formats() and unregister cleanly.
+        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx',
+                          lambda: 1/0,
+                          1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx',
+                          lambda: 1/0,
+                          [(1, 2), (1, 2, 3)])
+
+        register_archive_format('xxx', lambda: 1/0, [('x', 2)], 'xxx file')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertIn('xxx', formats)
+
+        unregister_archive_format('xxx')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertNotIn('xxx', formats)
+
+    def _compare_dirs(self, dir1: str, dir2: str) -> List[str]:
+        # check that dir1 and dir2 are equivalent,
+        # return the diff
+        # NOTE(review): files are matched by basename directly under dir2
+        # (paths are flattened), so this only works for the shallow trees
+        # built by _create_files().
+        diff = []  # type: List[str]
+        for root, dirs, files in os.walk(dir1):
+            for file_ in files:
+                path = os.path.join(root, file_)
+                target_path = os.path.join(dir2, os.path.split(path)[-1])
+                if not os.path.exists(target_path):
+                    diff.append(file_)
+        return diff
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_unpack_archive(self) -> None:
+        """Round-trip make_archive() -> unpack_archive() for every format."""
+        formats = ['tar', 'gztar', 'zip']
+        if BZ2_SUPPORTED:
+            formats.append('bztar')
+
+        for format in formats:
+            tmpdir = self.mkdtemp()
+            base_dir, root_dir, base_name =  self._create_files()
+            tmpdir2 = self.mkdtemp()
+            filename = make_archive(base_name, format, root_dir, base_dir)
+
+            # let's try to unpack it now
+            unpack_archive(filename, tmpdir2)
+            diff = self._compare_dirs(tmpdir, tmpdir2)
+            self.assertEqual(diff, [])
+
+            # and again, this time with the format specified
+            tmpdir3 = self.mkdtemp()
+            unpack_archive(filename, tmpdir3, format=format)
+            diff = self._compare_dirs(tmpdir, tmpdir3)
+            self.assertEqual(diff, [])
+        self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
+        self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
+
+    def test_unpack_registery(self) -> None:
+        # Registration/unregistration of custom unpack formats.
+        # (Method name keeps the upstream misspelling of "registry".)
+        formats = get_unpack_formats()
+
+        def _boo(filename: str, extract_dir: str, extra: int) -> None:
+            self.assertEqual(extra, 1)
+            self.assertEqual(filename, 'stuff.boo')
+            self.assertEqual(extract_dir, 'xx')
+
+        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
+        unpack_archive('stuff.boo', 'xx')
+
+        # trying to register a .boo unpacker again
+        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
+                          ['.boo'], _boo)
+
+        # should work now
+        unregister_unpack_format('Boo')
+        register_unpack_format('Boo2', ['.boo'], _boo)
+        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
+        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
+
+        # let's leave a clean state
+        unregister_unpack_format('Boo2')
+        self.assertEqual(get_unpack_formats(), formats)
+
+
+class TestMove(unittest.TestCase):
+
+    def setUp(self) -> None:
+        # Create a source dir containing one file "foo" plus an empty
+        # destination dir; dst_file is where a move into dst_dir lands.
+        filename = "foo"
+        self.src_dir = tempfile.mkdtemp()
+        self.dst_dir = tempfile.mkdtemp()
+        self.src_file = os.path.join(self.src_dir, filename)
+        self.dst_file = os.path.join(self.dst_dir, filename)
+        with open(self.src_file, "wb") as f:
+            f.write(b"spam")
+
+    def tearDown(self) -> None:
+        for d in (self.src_dir, self.dst_dir):
+            try:
+                if d:
+                    shutil.rmtree(d)
+            except:
+                pass
+
+    def _check_move_file(self, src: str, dst: str, real_dst: str) -> None:
+        # Move src to dst and verify the bytes arrived at real_dst and
+        # that src is gone.
+        with open(src, "rb") as f:
+            contents = f.read()
+        shutil.move(src, dst)
+        with open(real_dst, "rb") as f:
+            self.assertEqual(contents, f.read())
+        self.assertFalse(os.path.exists(src))
+
+    def _check_move_dir(self, src: str, dst: str, real_dst: str) -> None:
+        # Move a whole dir and verify its listing survives at real_dst.
+        contents = sorted(os.listdir(src))
+        shutil.move(src, dst)
+        self.assertEqual(contents, sorted(os.listdir(real_dst)))
+        self.assertFalse(os.path.exists(src))
+
+    def test_move_file(self) -> None:
+        # Move a file to another location on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_file, self.dst_file)
+
+    def test_move_file_to_dir(self) -> None:
+        # Move a file inside an existing dir on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
+
+    @mock_rename
+    def test_move_file_other_fs(self) -> None:
+        # Move a file to an existing dir on another filesystem.
+        self.test_move_file()
+
+    @mock_rename
+    def test_move_file_to_dir_other_fs(self) -> None:
+        # Move a file to another location on another filesystem.
+        self.test_move_file_to_dir()
+
+    def test_move_dir(self) -> None:
+        # Move a dir to another location on the same filesystem.
+        dst_dir = tempfile.mktemp()
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except:
+                pass
+
+    @mock_rename
+    def test_move_dir_other_fs(self) -> None:
+        # Move a dir to another location on another filesystem.
+        self.test_move_dir()
+
+    def test_move_dir_to_dir(self) -> None:
+        # Move a dir inside an existing dir on the same filesystem.
+        self._check_move_dir(self.src_dir, self.dst_dir,
+            os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
+
+    @mock_rename
+    def test_move_dir_to_dir_other_fs(self) -> None:
+        # Move a dir inside an existing dir on another filesystem.
+        self.test_move_dir_to_dir()
+
+    def test_existing_file_inside_dest_dir(self) -> None:
+        # A file with the same name inside the destination dir already exists.
+        with open(self.dst_file, "wb"):
+            pass
+        self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)
+
+    def test_dont_move_dir_in_itself(self) -> None:
+        # Moving a dir inside itself raises an Error.
+        dst = os.path.join(self.src_dir, "bar")
+        self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
+
+    def test_destinsrc_false_negative(self) -> None:
+        # _destinsrc() must detect a destination nested inside the source.
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'srcdir/dest')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertTrue(shutil._destinsrc(src, dst),
+                             msg='_destinsrc() wrongly concluded that '
+                             'dst (%s) is not in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    def test_destinsrc_false_positive(self) -> None:
+        # Paths that merely share a prefix (srcdir vs srcdir.new) are not
+        # "inside" the source.
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertFalse(shutil._destinsrc(src, dst),
+                            msg='_destinsrc() wrongly concluded that '
+                            'dst (%s) is in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+
class TestCopyFile(unittest.TestCase):
    """Tests for shutil.copyfile()'s file-handle management.

    shutil.open is monkey-patched with Faux fakes so the tests can
    observe whether copyfile() enters and exits its file context
    managers correctly when opening or closing a side fails.
    """

    # Set by _set_shutil_open() once shutil.open has been patched.
    # NOTE(review): never read back (tearDown restores unconditionally).
    _delete = False

    class Faux(object):
        """A fake file object recording context-manager usage."""
        _entered = False
        _exited_with = None # type: tuple
        _raised = False
        def __init__(self, raise_in_exit: bool = False,
                     suppress_at_exit: bool = True) -> None:
            # raise_in_exit: make __exit__ raise IOError("Cannot close").
            # suppress_at_exit: value __exit__ returns (True suppresses
            # any exception propagating out of the with-block).
            self._raise_in_exit = raise_in_exit
            self._suppress_at_exit = suppress_at_exit
        def read(self, *args: Any) -> str:
            # copyfile() only needs read() to exist; content is irrelevant.
            return ''
        def __enter__(self) -> None:
            self._entered = True
        def __exit__(self, exc_type: type, exc_val: BaseException,
                     exc_tb: TracebackType) -> bool:
            # Record how the with-block was exited for later assertions.
            self._exited_with = exc_type, exc_val, exc_tb
            if self._raise_in_exit:
                self._raised = True
                raise IOError("Cannot close")
            return self._suppress_at_exit

    def tearDown(self) -> None:
        # Restore shutil.open to the builtin open.  NOTE(review): this
        # leaves a shutil.open attribute set even if no test patched it.
        shutil.open = open

    def _set_shutil_open(self, func: Any) -> None:
        # Monkey-patch the open() used inside shutil for this test.
        shutil.open = func
        self._delete = True

    def test_w_source_open_fails(self) -> None:
        """copyfile() propagates IOError when the source can't be opened."""
        def _open(filename: str, mode: str= 'r') -> BinaryIO:
            if filename == 'srcfile':
                raise IOError('Cannot open "srcfile"')
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')

    def test_w_dest_open_fails(self) -> None:
        """If opening the destination fails, the already-open source is
        exited with that IOError (which Faux suppresses)."""

        srcfile = TestCopyFile.Faux()

        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                raise IOError('Cannot open "destfile"')
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot open "destfile"',))

    def test_w_dest_close_fails(self) -> None:
        """A failure while closing the destination propagates into the
        source's __exit__ (and is suppressed there by Faux)."""

        srcfile = TestCopyFile.Faux()
        destfile = TestCopyFile.Faux(True)

        def _open(filename: str, mode: str = 'r') -> TestCopyFile.Faux:
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertTrue(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is IOError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot close',))

    def test_w_source_close_fails(self) -> None:
        """A failure while closing the source escapes copyfile(): nothing
        outside the source's own __exit__ can suppress it."""

        srcfile = TestCopyFile.Faux(True)
        destfile = TestCopyFile.Faux()

        def _open(filename: str, mode: str= 'r') -> TestCopyFile.Faux:
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.

        self._set_shutil_open(_open)

        self.assertRaises(IOError,
                          shutil.copyfile, 'srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertFalse(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is None)
        self.assertTrue(srcfile._raised)

    def test_move_dir_caseinsensitive(self) -> None:
        """Rename a folder to the same name in a different case.

        NOTE(review): this test exercises shutil.move, not copyfile --
        it looks misplaced in TestCopyFile and probably belongs in the
        move-related test class above.
        """

        self.src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(
                os.path.dirname(self.src_dir),
                os.path.basename(self.src_dir).upper())
        self.assertNotEqual(self.src_dir, dst_dir)

        try:
            shutil.move(self.src_dir, dst_dir)
            self.assertTrue(os.path.isdir(dst_dir))
        finally:
            if os.path.exists(dst_dir):
                os.rmdir(dst_dir)
+
+
+
def test_main() -> None:
    """Entry point used by the regression-test driver."""
    support.run_unittest(TestShutil, TestMove, TestCopyFile)

if __name__ == '__main__':
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_subprocess.py b/test-data/stdlib-samples/3.2/test/test_subprocess.py
new file mode 100644
index 0000000..772d8cc
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_subprocess.py
@@ -0,0 +1,1764 @@
+import unittest
+from test import support
+import subprocess
+import sys
+import signal
+import io
+import os
+import errno
+import tempfile
+import time
+import re
+import sysconfig
+import warnings
+import select
+import shutil
+import gc
+
+import resource
+
+from typing import Any, Dict, Callable, Iterable, List, Set, Tuple, cast
+
+mswindows = (sys.platform == "win32")
+
+#
+# Depends on the following external programs: Python
+#
+
+if mswindows:
+    SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
+                                                'os.O_BINARY);')
+else:
+    SETBINARY = ''
+
+
# Provide mkstemp even on platforms whose tempfile lacks it.
try:
    mkstemp = tempfile.mkstemp
except AttributeError:
    # tempfile.mkstemp is not available
    def _mkstemp() -> Tuple[int, str]:
        """Replacement for mkstemp, calling mktemp."""
        # NOTE: mktemp() + open is racy, but acceptable for a test helper.
        fname = tempfile.mktemp()
        return os.open(fname, os.O_RDWR|os.O_CREAT), fname
    mkstemp = cast(Any, _mkstemp)
+
+
class BaseTestCase(unittest.TestCase):
    """Shared setup/teardown and assertion helpers for subprocess tests."""

    def setUp(self) -> None:
        # Try to minimize the number of children we have so this test
        # doesn't crash on some buildbots (Alphas in particular).
        support.reap_children()

    def tearDown(self) -> None:
        # Reap every process the test spawned, then verify subprocess's
        # internal bookkeeping list of live handles is empty (no leaks).
        for inst in subprocess._active:
            inst.wait()
        subprocess._cleanup()
        self.assertFalse(subprocess._active, "subprocess._active not empty")

    def assertStderrEqual(self, stderr: bytes, expected: bytes,
                          msg: object = None) -> None:
        """Compare child stderr to *expected* ignoring interpreter noise."""
        # In a debug build, stuff like "[6580 refs]" is printed to stderr at
        # shutdown time.  That frustrates tests trying to check stderr produced
        # from a spawned Python process.
        actual = support.strip_python_stderr(stderr)
        self.assertEqual(actual, expected, msg)
+
+
+class ProcessTestCase(BaseTestCase):
+
+    def test_call_seq(self) -> None:
+        # call() function with sequence argument
+        rc = subprocess.call([sys.executable, "-c",
+                              "import sys; sys.exit(47)"])
+        self.assertEqual(rc, 47)
+
+    def test_check_call_zero(self) -> None:
+        # check_call() function with zero return code
+        rc = subprocess.check_call([sys.executable, "-c",
+                                    "import sys; sys.exit(0)"])
+        self.assertEqual(rc, 0)
+
+    def test_check_call_nonzero(self) -> None:
+        # check_call() function with non-zero return code
+        with self.assertRaises(subprocess.CalledProcessError) as c:
+            subprocess.check_call([sys.executable, "-c",
+                                   "import sys; sys.exit(47)"])
+        self.assertEqual(c.exception.returncode, 47)
+
    def test_check_output(self) -> None:
        """check_output() captures the child's stdout on success."""
        output = subprocess.check_output(
                [sys.executable, "-c", "print('BDFL')"])
        # cast(Any, ...) works around a type-checking limitation (issue #39).
        self.assertIn(b'BDFL', cast(Any, output)) # see #39
+
+    def test_check_output_nonzero(self) -> None:
+        # check_call() function with non-zero return code
+        with self.assertRaises(subprocess.CalledProcessError) as c:
+            subprocess.check_output(
+                    [sys.executable, "-c", "import sys; sys.exit(5)"])
+        self.assertEqual(c.exception.returncode, 5)
+
    def test_check_output_stderr(self) -> None:
        """check_output() folds stderr into the captured output when
        stderr=STDOUT is passed."""
        output = subprocess.check_output(
                [sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"],
                stderr=subprocess.STDOUT)
        self.assertIn(b'BDFL', cast(Any, output)) # see #39
+
+    def test_check_output_stdout_arg(self) -> None:
+        # check_output() function stderr redirected to stdout
+        with self.assertRaises(ValueError) as c:
+            output = subprocess.check_output(
+                    [sys.executable, "-c", "print('will not be run')"],
+                    stdout=sys.stdout)
+            self.fail("Expected ValueError when stdout arg supplied.")
+        self.assertIn('stdout', c.exception.args[0])
+
    def test_call_kwargs(self) -> None:
        """call() forwards keyword args (here env=) to Popen.

        The child exits with the truth value of the env comparison, so a
        return code of 1 means FRUIT was passed through correctly.
        """
        newenv = os.environ.copy()
        newenv["FRUIT"] = "banana"
        rc = subprocess.call([sys.executable, "-c",
                              'import sys, os;'
                              'sys.exit(os.getenv("FRUIT")=="banana")'],
                             env=newenv)
        self.assertEqual(rc, 1)
+
    def test_invalid_args(self) -> None:
        """Popen() with invalid arguments raises TypeError while
        Popen.__del__ stays silent on stderr (issue #12085)."""
        with support.captured_stderr() as s:
            self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1)
            # Build one positional argument more than __init__ accepts.
            argcount = subprocess.Popen.__init__.__code__.co_argcount
            too_many_args = [0] * (argcount + 1)
            self.assertRaises(TypeError, subprocess.Popen, *too_many_args)
        self.assertEqual(s.getvalue(), '')
+
    def test_stdin_none(self) -> None:
        """p.stdin is None when stdin is not redirected."""
        p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        p.wait()
        self.assertEqual(p.stdin, None)
+
    def test_stdout_none(self) -> None:
        """p.stdout is None when stdout is not redirected (the child's
        output goes straight to the test run's stdout)."""
        p = subprocess.Popen([sys.executable, "-c",
                             'print("    this bit of output is from a '
                             'test of stdout in a different '
                             'process ...")'],
                             stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        self.addCleanup(p.stdin.close)
        self.addCleanup(p.stderr.close)
        p.wait()
        self.assertEqual(p.stdout, None)
+
    def test_stderr_none(self) -> None:
        """p.stderr is None when stderr is not redirected."""
        p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stdin.close)
        p.wait()
        self.assertEqual(p.stderr, None)
+
+    def test_executable_with_cwd(self) -> None:
+        python_dir = os.path.dirname(os.path.realpath(sys.executable))
+        p = subprocess.Popen(["somethingyoudonthave", "-c",
+                              "import sys; sys.exit(47)"],
+                             executable=sys.executable, cwd=python_dir)
+        p.wait()
+        self.assertEqual(p.returncode, 47)
+
    @unittest.skipIf(sysconfig.is_python_build(),
                     "need an installed Python. See #7774")
    def test_executable_without_cwd(self) -> None:
        """executable= works without cwd= for an installed Python.

        For a normal installation, it should work without 'cwd'
        argument.  For test runs in the build directory, see #7774.
        """
        p = subprocess.Popen(["somethingyoudonthave", "-c",
                              "import sys; sys.exit(47)"],
                             executable=sys.executable)
        p.wait()
        self.assertEqual(p.returncode, 47)
+
    def test_stdin_pipe(self) -> None:
        """stdin redirection via PIPE; the child exits with the truth
        value of the comparison (True -> return code 1)."""
        p = subprocess.Popen([sys.executable, "-c",
                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
                        stdin=subprocess.PIPE)
        p.stdin.write(b"pear")
        # Close stdin so the child's read() sees EOF; then reap it.
        p.stdin.close()
        p.wait()
        self.assertEqual(p.returncode, 1)
+
    def test_stdin_filedes(self) -> None:
        """stdin can be an open file descriptor (rewound to the start)."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        d = tf.fileno()
        os.write(d, b"pear")
        os.lseek(d, 0, 0)
        p = subprocess.Popen([sys.executable, "-c",
                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
                         stdin=d)
        p.wait()
        self.assertEqual(p.returncode, 1)
+
    def test_stdin_fileobj(self) -> None:
        """stdin can be an open file object (rewound to the start)."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        tf.write(b"pear")
        tf.seek(0)
        p = subprocess.Popen([sys.executable, "-c",
                         'import sys; sys.exit(sys.stdin.read() == "pear")'],
                         stdin=tf)
        p.wait()
        self.assertEqual(p.returncode, 1)
+
    def test_stdout_pipe(self) -> None:
        """stdout redirection via PIPE is readable from p.stdout."""
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stdout.write("orange")'],
                         stdout=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.assertEqual(p.stdout.read(), b"orange")
+
    def test_stdout_filedes(self) -> None:
        """stdout can be an open file descriptor; read it back after
        rewinding."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        d = tf.fileno()
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stdout.write("orange")'],
                         stdout=d)
        p.wait()
        os.lseek(d, 0, 0)
        self.assertEqual(os.read(d, 1024), b"orange")
+
    def test_stdout_fileobj(self) -> None:
        """stdout can be an open file object."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stdout.write("orange")'],
                         stdout=tf)
        p.wait()
        tf.seek(0)
        self.assertEqual(tf.read(), b"orange")
+
    def test_stderr_pipe(self) -> None:
        """stderr redirection via PIPE is readable from p.stderr."""
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stderr.write("strawberry")'],
                         stderr=subprocess.PIPE)
        self.addCleanup(p.stderr.close)
        self.assertStderrEqual(p.stderr.read(), b"strawberry")
+
    def test_stderr_filedes(self) -> None:
        """stderr can be an open file descriptor."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        d = tf.fileno()
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stderr.write("strawberry")'],
                         stderr=d)
        p.wait()
        os.lseek(d, 0, 0)
        self.assertStderrEqual(os.read(d, 1024), b"strawberry")
+
    def test_stderr_fileobj(self) -> None:
        """stderr can be an open file object."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        p = subprocess.Popen([sys.executable, "-c",
                          'import sys; sys.stderr.write("strawberry")'],
                         stderr=tf)
        p.wait()
        tf.seek(0)
        self.assertStderrEqual(tf.read(), b"strawberry")
+
    def test_stdout_stderr_pipe(self) -> None:
        """stderr=STDOUT merges both streams into the stdout pipe; the
        child flushes between writes to fix the ordering."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys;'
                              'sys.stdout.write("apple");'
                              'sys.stdout.flush();'
                              'sys.stderr.write("orange")'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        self.addCleanup(p.stdout.close)
        self.assertStderrEqual(p.stdout.read(), b"appleorange")
+
    def test_stdout_stderr_file(self) -> None:
        """stdout and stderr can share one open file object."""
        tf = tempfile.TemporaryFile()
        self.addCleanup(tf.close)
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys;'
                              'sys.stdout.write("apple");'
                              'sys.stdout.flush();'
                              'sys.stderr.write("orange")'],
                             stdout=tf,
                             stderr=tf)
        p.wait()
        tf.seek(0)
        self.assertStderrEqual(tf.read(), b"appleorange")
+
    def test_stdout_filedes_of_stdout(self) -> None:
        """stdout can be the literal fd 1 (#1531862); the exit code is the
        byte count os.write() reported -- 2 for the two bytes written."""
        cmd = r"import sys, os; sys.exit(os.write(sys.stdout.fileno(), b'.\n'))"
        rc = subprocess.call([sys.executable, "-c", cmd], stdout=1)
        self.assertEqual(rc, 2)
+
    def test_cwd(self) -> None:
        """cwd= sets the child's working directory."""
        tmpdir = tempfile.gettempdir()
        # We cannot use os.path.realpath to canonicalize the path,
        # since it doesn't expand Tru64 {memb} strings. See bug 1063571.
        cwd = os.getcwd()
        os.chdir(tmpdir)
        tmpdir = os.getcwd()
        os.chdir(cwd)
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;'
                              'sys.stdout.write(os.getcwd())'],
                             stdout=subprocess.PIPE,
                             cwd=tmpdir)
        self.addCleanup(p.stdout.close)
        # normcase smooths over case-insensitive filesystems.
        normcase = os.path.normcase
        self.assertEqual(normcase(p.stdout.read().decode("utf-8")),
                         normcase(tmpdir))
+
    def test_env(self) -> None:
        """env= replaces the child's environment (Popen as a context
        manager reaps the process on exit)."""
        newenv = os.environ.copy()
        newenv["FRUIT"] = "orange"
        with subprocess.Popen([sys.executable, "-c",
                               'import sys,os;'
                               'sys.stdout.write(os.getenv("FRUIT"))'],
                              stdout=subprocess.PIPE,
                              env=newenv) as p:
            stdout, stderr = p.communicate()
            self.assertEqual(stdout, b"orange")
+
    # Windows requires at least the SYSTEMROOT environment variable to start
    # Python
    @unittest.skipIf(sys.platform == 'win32',
                     'cannot test an empty env on Windows')
    @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None,
                     'the python library cannot be loaded '
                     'with an empty environment')
    def test_empty_env(self) -> None:
        """env={} launches the child with an (almost) empty environment."""
        with subprocess.Popen([sys.executable, "-c",
                               'import os; '
                               'print(list(os.environ.keys()))'],
                              stdout=subprocess.PIPE,
                              env={}) as p:
            stdout, stderr = p.communicate()
            self.assertIn(stdout.strip(),
                [b"[]",
                 # Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty
                 # environment
                 b"['__CF_USER_TEXT_ENCODING']"])
+
    def test_communicate_stdin(self) -> None:
        """communicate() delivers its argument to the child's stdin and
        closes it; exit code 1 means the comparison was True."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys;'
                              'sys.exit(sys.stdin.read() == "pear")'],
                             stdin=subprocess.PIPE)
        p.communicate(b"pear")
        self.assertEqual(p.returncode, 1)
+
    def test_communicate_stdout(self) -> None:
        """communicate() returns captured stdout and None for the
        unredirected stderr."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys; sys.stdout.write("pineapple")'],
                             stdout=subprocess.PIPE)
        (stdout, stderr) = p.communicate()
        self.assertEqual(stdout, b"pineapple")
        self.assertEqual(stderr, None)
+
    def test_communicate_stderr(self) -> None:
        """communicate() returns captured stderr and None for the
        unredirected stdout."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys; sys.stderr.write("pineapple")'],
                             stderr=subprocess.PIPE)
        (stdout, stderr) = p.communicate()
        self.assertEqual(stdout, None)
        self.assertStderrEqual(stderr, b"pineapple")
+
    def test_communicate(self) -> None:
        """communicate() handles all three pipes at once without
        deadlocking."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;'
                              'sys.stderr.write("pineapple");'
                              'sys.stdout.write(sys.stdin.read())'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        self.addCleanup(p.stdin.close)
        (stdout, stderr) = p.communicate(b"banana")
        self.assertEqual(stdout, b"banana")
        self.assertStderrEqual(stderr, b"pineapple")
+
    # Test for the fd leak reported in http://bugs.python.org/issue2791.
    def test_communicate_pipe_fd_leak(self) -> None:
        """Every redirected pipe must be closed after communicate(),
        for all 7 non-empty combinations of stdin/stdout/stderr."""
        for stdin_pipe in (False, True):
            for stdout_pipe in (False, True):
                for stderr_pipe in (False, True):
                    options = {}  # type: Dict[str, Any]
                    if stdin_pipe:
                        options['stdin'] = subprocess.PIPE
                    if stdout_pipe:
                        options['stdout'] = subprocess.PIPE
                    if stderr_pipe:
                        options['stderr'] = subprocess.PIPE
                    # Skip the no-redirection case: nothing to leak.
                    if not options:
                        continue
                    p = subprocess.Popen([sys.executable, "-c", "pass"], **options)
                    p.communicate()
                    if p.stdin is not None:
                        self.assertTrue(p.stdin.closed)
                    if p.stdout is not None:
                        self.assertTrue(p.stdout.closed)
                    if p.stderr is not None:
                        self.assertTrue(p.stderr.closed)
+
    def test_communicate_returns(self) -> None:
        """communicate() should return (None, None) if no redirection
        is active."""
        p = subprocess.Popen([sys.executable, "-c",
                              "import sys; sys.exit(47)"])
        (stdout, stderr) = p.communicate()
        self.assertEqual(stdout, None)
        self.assertEqual(stderr, None)
+
    def test_communicate_pipe_buf(self) -> None:
        """communicate() with writes larger than the OS pipe buffer.

        This test will probably deadlock rather than fail, if
        communicate() does not work properly.
        """
        # Probe the platform's pipe buffer size with a throwaway pipe.
        x, y = os.pipe()
        if mswindows:
            pipe_buf = 512
        else:
            pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
        os.close(x)
        os.close(y)
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;'
                              'sys.stdout.write(sys.stdin.read(47));'
                              'sys.stderr.write("xyz"*%d);'
                              'sys.stdout.write(sys.stdin.read())' % pipe_buf],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        self.addCleanup(p.stdin.close)
        string_to_write = b"abc"*pipe_buf
        (stdout, stderr) = p.communicate(string_to_write)
        self.assertEqual(stdout, string_to_write)
+
    def test_writes_before_communicate(self) -> None:
        """Data written to p.stdin before communicate() is prepended to
        the data communicate() sends."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;'
                              'sys.stdout.write(sys.stdin.read())'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        self.addCleanup(p.stdin.close)
        p.stdin.write(b"banana")
        (stdout, stderr) = p.communicate(b"split")
        self.assertEqual(stdout, b"bananasplit")
        self.assertStderrEqual(stderr, b"")
+
    def test_universal_newlines(self) -> None:
        """universal_newlines=1 gives text-mode pipes: \\r\\n and \\r in
        the child's output are normalized to \\n on read.

        The interleaved write/readline/close sequence below is what the
        child's readline()/read() pair expects -- do not reorder it.
        """
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;' + SETBINARY +
                              'sys.stdout.write(sys.stdin.readline());'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line2\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write(sys.stdin.read());'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line4\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line5\\r\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line6\\r");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("\\nline7");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("\\nline8");'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             universal_newlines=1)
        p.stdin.write("line1\n")
        self.assertEqual(p.stdout.readline(), "line1\n")
        p.stdin.write("line3\n")
        p.stdin.close()
        self.addCleanup(p.stdout.close)
        self.assertEqual(p.stdout.readline(),
                         "line2\n")
        self.assertEqual(p.stdout.read(6),
                         "line3\n")
        self.assertEqual(p.stdout.read(),
                         "line4\nline5\nline6\nline7\nline8")
+
    def test_universal_newlines_communicate(self) -> None:
        """Universal-newline translation also applies to output gathered
        through communicate()."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;' + SETBINARY +
                              'sys.stdout.write("line2\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line4\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line5\\r\\n");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("line6\\r");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("\\nline7");'
                              'sys.stdout.flush();'
                              'sys.stdout.write("\\nline8");'],
                             stderr=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             universal_newlines=1)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        # BUG: can't give a non-empty stdin because it breaks both the
        # select- and poll-based communicate() implementations.
        (stdout, stderr) = p.communicate()
        self.assertEqual(stdout,
                         "line2\nline4\nline5\nline6\nline7\nline8")
+
    def test_universal_newlines_communicate_stdin(self) -> None:
        """Universal newlines through communicate(), with only stdin
        redirected; the child asserts it received translated text."""
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys,os;' + SETBINARY + '''\nif True:
                                  s = sys.stdin.readline()
                                  assert s == "line1\\n", repr(s)
                                  s = sys.stdin.read()
                                  assert s == "line3\\n", repr(s)
                              '''],
                             stdin=subprocess.PIPE,
                             universal_newlines=1)
        (stdout, stderr) = p.communicate("line1\nline3\n")
        self.assertEqual(p.returncode, 0)
+
    def test_no_leaking(self) -> None:
        """Repeated Popen+communicate cycles must not leak file
        descriptors, verified by running close to the fd limit."""
        # Make sure we leak no resources
        if not mswindows:
            max_handles = 1026 # too much for most UNIX systems
        else:
            max_handles = 2050 # too much for (at least some) Windows setups
        handles = []  # type: List[int]
        tmpdir = tempfile.mkdtemp()
        try:
            # Exhaust the process's fd table (stop at EMFILE).
            for i in range(max_handles):
                try:
                    tmpfile = os.path.join(tmpdir, support.TESTFN)
                    handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT))
                except OSError as e:
                    if e.errno != errno.EMFILE:
                        raise
                    break
            else:
                self.skipTest("failed to reach the file descriptor limit "
                    "(tried %d)" % max_handles)
            # Close a couple of them (should be enough for a subprocess)
            for i in range(10):
                os.close(handles.pop())
            # Loop creating some subprocesses. If one of them leaks some fds,
            # the next loop iteration will fail by reaching the max fd limit.
            for i in range(15):
                p = subprocess.Popen([sys.executable, "-c",
                                      "import sys;"
                                      "sys.stdout.write(sys.stdin.read())"],
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
                data = p.communicate(b"lime")[0]
                self.assertEqual(data, b"lime")
        finally:
            for h in handles:
                os.close(h)
            shutil.rmtree(tmpdir)
+
+    def test_list2cmdline(self) -> None:
+        self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
+                         '"a b c" d e')
+        self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
+                         'ab\\"c \\ d')
+        self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
+                         'ab\\"c " \\\\" d')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
+                         'a\\\\\\b "de fg" h')
+        self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
+                         'a\\\\\\"b c d')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
+                         '"a\\\\b c" d e')
+        self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
+                         '"a\\\\b\\ c" d e')
+        self.assertEqual(subprocess.list2cmdline(['ab', '']),
+                         'ab ""')
+
+
+    def test_poll(self) -> None:
+        p = subprocess.Popen([sys.executable,
+                          "-c", "import time; time.sleep(1)"])
+        count = 0
+        while p.poll() is None:
+            time.sleep(0.1)
+            count += 1
+        # We expect that the poll loop probably went around about 10 times,
+        # but, based on system scheduling we can't control, it's possible
+        # poll() never returned None.  It "should be" very rare that it
+        # didn't go around at least twice.
+        self.assertGreaterEqual(count, 2)
+        # Subsequent invocations should just return the returncode
+        self.assertEqual(p.poll(), 0)
+
+
+    def test_wait(self) -> None:
+        p = subprocess.Popen([sys.executable,
+                          "-c", "import time; time.sleep(2)"])
+        self.assertEqual(p.wait(), 0)
+        # Subsequent invocations should just return the returncode
+        self.assertEqual(p.wait(), 0)
+
+
+    def test_invalid_bufsize(self) -> None:
+        # an invalid type of the bufsize argument should raise
+        # TypeError.
+        with self.assertRaises(TypeError):
+            subprocess.Popen([sys.executable, "-c", "pass"], cast(Any, "orange"))
+
+    def test_bufsize_is_none(self) -> None:
+        # bufsize=None should be the same as bufsize=0.
+        p = subprocess.Popen([sys.executable, "-c", "pass"], None)
+        self.assertEqual(p.wait(), 0)
+        # Again with keyword arg
+        p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None)
+        self.assertEqual(p.wait(), 0)
+
+    def test_leaking_fds_on_error(self) -> None:
+        # see bug #5179: Popen leaks file descriptors to PIPEs if
+        # the child fails to execute; this will eventually exhaust
+        # the maximum number of open fds. 1024 seems a very common
+        # value for that limit, but Windows has 2048, so we loop
+        # 1024 times (each call leaked two fds).
+        for i in range(1024):
+            # Windows raises IOError.  Others raise OSError.
+            with self.assertRaises(EnvironmentError) as c:
+                subprocess.Popen(['nonexisting_i_hope'],
+                                 stdout=subprocess.PIPE,
+                                 stderr=subprocess.PIPE)
+            # ignore errors that indicate the command was not found
+            if c.exception.errno not in (errno.ENOENT, errno.EACCES):
+                raise c.exception
+
+    def test_issue8780(self) -> None:
+        # Ensure that stdout is inherited from the parent
+        # if stdout=PIPE is not used
+        code = ';'.join([
+            'import subprocess, sys',
+            'retcode = subprocess.call('
+                "[sys.executable, '-c', 'print(\"Hello World!\")'])",
+            'assert retcode == 0'])
+        output = subprocess.check_output([sys.executable, '-c', code])
+        self.assertTrue(output.startswith(b'Hello World!'), ascii(output))
+
+    def test_handles_closed_on_exception(self) -> None:
+        # If CreateProcess exits with an error, ensure the
+        # duplicate output handles are released
+        ifhandle, ifname = mkstemp()
+        ofhandle, ofname = mkstemp()
+        efhandle, efname = mkstemp()
+        try:
+            subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle,
+              stderr=efhandle)
+        except OSError:
+            os.close(ifhandle)
+            os.remove(ifname)
+            os.close(ofhandle)
+            os.remove(ofname)
+            os.close(efhandle)
+            os.remove(efname)
+        self.assertFalse(os.path.exists(ifname))
+        self.assertFalse(os.path.exists(ofname))
+        self.assertFalse(os.path.exists(efname))
+
+    def test_communicate_epipe(self) -> None:
+        # Issue 10963: communicate() should hide EPIPE
+        p = subprocess.Popen([sys.executable, "-c", 'pass'],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        p.communicate(b"x" * 2**20)
+
+    def test_communicate_epipe_only_stdin(self) -> None:
+        # Issue 10963: communicate() should hide EPIPE
+        p = subprocess.Popen([sys.executable, "-c", 'pass'],
+                             stdin=subprocess.PIPE)
+        self.addCleanup(p.stdin.close)
+        time.sleep(2)
+        p.communicate(b"x" * 2**20)
+
+    @unittest.skipUnless(hasattr(signal, 'SIGALRM'),
+                         "Requires signal.SIGALRM")
+    def test_communicate_eintr(self) -> None:
+        # Issue #12493: communicate() should handle EINTR
+        def handler(signum, frame):
+            pass
+        old_handler = signal.signal(signal.SIGALRM, handler)
+        self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
+
+        # the process is running for 2 seconds
+        args = [sys.executable, "-c", 'import time; time.sleep(2)']
+        for stream in ('stdout', 'stderr'):
+            kw = {stream: subprocess.PIPE}  # type: Dict[str, Any]
+            with subprocess.Popen(args, **kw) as process:
+                signal.alarm(1)
+                # communicate() will be interrupted by SIGALRM
+                process.communicate()
+
+
+# context manager
+class _SuppressCoreFiles(object):
+    """Try to prevent core files from being created."""
+    old_limit = None # type: Tuple[int, int]
+
+    def __enter__(self) -> None:
+        """Try to save previous ulimit, then set it to (0, 0)."""
+        if resource is not None:
+            try:
+                self.old_limit = resource.getrlimit(resource.RLIMIT_CORE)
+                resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
+            except (ValueError, resource.error):
+                pass
+
+        if sys.platform == 'darwin':
+            # Check if the 'Crash Reporter' on OSX was configured
+            # in 'Developer' mode and warn that it will get triggered
+            # when it is.
+            #
+            # This assumes that this context manager is used in tests
+            # that might trigger the next manager.
+            value = subprocess.Popen(['/usr/bin/defaults', 'read',
+                    'com.apple.CrashReporter', 'DialogType'],
+                    stdout=subprocess.PIPE).communicate()[0]
+            if value.strip() == b'developer':
+                print("this tests triggers the Crash Reporter, "
+                      "that is intentional", end='')
+                sys.stdout.flush()
+
+    def __exit__(self, *args: Any) -> None:
+        """Return core file behavior to default."""
+        if self.old_limit is None:
+            return
+        if resource is not None:
+            try:
+                resource.setrlimit(resource.RLIMIT_CORE, self.old_limit)
+            except (ValueError, resource.error):
+                pass
+
+
+ at unittest.skipIf(mswindows, "POSIX specific tests")
+class POSIXProcessTestCase(BaseTestCase):
+
+    def test_exceptions(self) -> None:
+        nonexistent_dir = "/_this/pa.th/does/not/exist"
+        try:
+            os.chdir(nonexistent_dir)
+        except OSError as e:
+            # This avoids hard coding the errno value or the OS perror()
+            # string and instead capture the exception that we want to see
+            # below for comparison.
+            desired_exception = e
+            desired_exception.strerror += ': ' + repr(sys.executable)
+        else:
+            self.fail("chdir to nonexistant directory %s succeeded." %
+                      nonexistent_dir)
+
+        # Error in the child re-raised in the parent.
+        try:
+            p = subprocess.Popen([sys.executable, "-c", ""],
+                                 cwd=nonexistent_dir)
+        except OSError as e:
+            # Test that the child process chdir failure actually makes
+            # it up to the parent process as the correct exception.
+            self.assertEqual(desired_exception.errno, e.errno)
+            self.assertEqual(desired_exception.strerror, e.strerror)
+        else:
+            self.fail("Expected OSError: %s" % desired_exception)
+
+    def test_restore_signals(self) -> None:
+        # Code coverage for both values of restore_signals to make sure it
+        # at least does not blow up.
+        # A test for behavior would be complex.  Contributions welcome.
+        subprocess.call([sys.executable, "-c", ""], restore_signals=True)
+        subprocess.call([sys.executable, "-c", ""], restore_signals=False)
+
+    def test_start_new_session(self) -> None:
+        # For code coverage of calling setsid().  We don't care if we get an
+        # EPERM error from it depending on the test execution environment, that
+        # still indicates that it was called.
+        try:
+            output = subprocess.check_output(
+                    [sys.executable, "-c",
+                     "import os; print(os.getpgid(os.getpid()))"],
+                    start_new_session=True)
+        except OSError as e:
+            if e.errno != errno.EPERM:
+                raise
+        else:
+            parent_pgid = os.getpgid(os.getpid())
+            child_pgid = int(output)
+            self.assertNotEqual(parent_pgid, child_pgid)
+
+    def test_run_abort(self) -> None:
+        # returncode handles signal termination
+        with _SuppressCoreFiles():
+            p = subprocess.Popen([sys.executable, "-c",
+                                  'import os; os.abort()'])
+            p.wait()
+        self.assertEqual(-p.returncode, signal.SIGABRT)
+
+    def test_preexec(self) -> None:
+        # DISCLAIMER: Setting environment variables is *not* a good use
+        # of a preexec_fn.  This is merely a test.
+        p = subprocess.Popen([sys.executable, "-c",
+                              'import sys,os;'
+                              'sys.stdout.write(os.getenv("FRUIT"))'],
+                             stdout=subprocess.PIPE,
+                             preexec_fn=lambda: os.putenv("FRUIT", "apple"))
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read(), b"apple")
+
+    def test_preexec_exception(self) -> None:
+        def raise_it():
+            raise ValueError("What if two swallows carried a coconut?")
+        try:
+            p = subprocess.Popen([sys.executable, "-c", ""],
+                                 preexec_fn=raise_it)
+        except RuntimeError as e:
+            self.assertTrue(
+                    subprocess._posixsubprocess,
+                    "Expected a ValueError from the preexec_fn")
+        except ValueError as e2:
+            self.assertIn("coconut", e2.args[0])
+        else:
+            self.fail("Exception raised by preexec_fn did not make it "
+                      "to the parent process.")
+
+    def test_preexec_gc_module_failure(self) -> None:
+        # This tests the code that disables garbage collection if the child
+        # process will execute any Python.
+        def raise_runtime_error():
+            raise RuntimeError("this shouldn't escape")
+        enabled = gc.isenabled()
+        orig_gc_disable = gc.disable
+        orig_gc_isenabled = gc.isenabled
+        try:
+            gc.disable()
+            self.assertFalse(gc.isenabled())
+            subprocess.call([sys.executable, '-c', ''],
+                            preexec_fn=lambda: None)
+            self.assertFalse(gc.isenabled(),
+                             "Popen enabled gc when it shouldn't.")
+
+            gc.enable()
+            self.assertTrue(gc.isenabled())
+            subprocess.call([sys.executable, '-c', ''],
+                            preexec_fn=lambda: None)
+            self.assertTrue(gc.isenabled(), "Popen left gc disabled.")
+
+            setattr(gc, 'disable', raise_runtime_error)
+            self.assertRaises(RuntimeError, subprocess.Popen,
+                              [sys.executable, '-c', ''],
+                              preexec_fn=lambda: None)
+
+            del gc.isenabled  # force an AttributeError
+            self.assertRaises(AttributeError, subprocess.Popen,
+                              [sys.executable, '-c', ''],
+                              preexec_fn=lambda: None)
+        finally:
+            setattr(gc, 'disable', orig_gc_disable)
+            setattr(gc, 'isenabled', orig_gc_isenabled)
+            if not enabled:
+                gc.disable()
+
+    def test_args_string(self) -> None:
+        # args is a string
+        fd, fname = mkstemp()
+        # reopen in text mode
+        with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
+            fobj.write("#!/bin/sh\n")
+            fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
+                       sys.executable)
+        os.chmod(fname, 0o700)
+        p = subprocess.Popen(fname)
+        p.wait()
+        os.remove(fname)
+        self.assertEqual(p.returncode, 47)
+
+    def test_invalid_args(self) -> None:
+        # invalid arguments should raise ValueError
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          startupinfo=47)
+        self.assertRaises(ValueError, subprocess.call,
+                          [sys.executable, "-c",
+                           "import sys; sys.exit(47)"],
+                          creationflags=47)
+
+    def test_shell_sequence(self) -> None:
+        # Run command through the shell (sequence)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "apple"
+        p = subprocess.Popen(["echo $FRUIT"], shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
+
+    def test_shell_string(self) -> None:
+        # Run command through the shell (string)
+        newenv = os.environ.copy()
+        newenv["FRUIT"] = "apple"
+        p = subprocess.Popen("echo $FRUIT", shell=1,
+                             stdout=subprocess.PIPE,
+                             env=newenv)
+        self.addCleanup(p.stdout.close)
+        self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
+
+    def test_call_string(self) -> None:
+        # call() function with string argument on UNIX
+        fd, fname = mkstemp()
+        # reopen in text mode
+        with open(fd, "w", errors=cast(Any, "surrogateescape")) as fobj: # see #260
+            fobj.write("#!/bin/sh\n")
+            fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
+                       sys.executable)
+        os.chmod(fname, 0o700)
+        rc = subprocess.call(fname)
+        os.remove(fname)
+        self.assertEqual(rc, 47)
+
+    def test_specific_shell(self) -> None:
+        # Issue #9265: Incorrect name passed as arg[0].
+        shells = []  # type: List[str]
+        for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
+            for name in ['bash', 'ksh']:
+                sh = os.path.join(prefix, name)
+                if os.path.isfile(sh):
+                    shells.append(sh)
+        if not shells: # Will probably work for any shell but csh.
+            self.skipTest("bash or ksh required for this test")
+        sh = '/bin/sh'
+        if os.path.isfile(sh) and not os.path.islink(sh):
+            # Test will fail if /bin/sh is a symlink to csh.
+            shells.append(sh)
+        for sh in shells:
+            p = subprocess.Popen("echo $0", executable=sh, shell=True,
+                                 stdout=subprocess.PIPE)
+            self.addCleanup(p.stdout.close)
+            self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii'))
+
+    def _kill_process(self, method: str, *args: Any) -> subprocess.Popen:
+        # Do not inherit file handles from the parent.
+        # It should fix failures on some platforms.
+        p = subprocess.Popen([sys.executable, "-c", """if 1:
+                             import sys, time
+                             sys.stdout.write('x\\n')
+                             sys.stdout.flush()
+                             time.sleep(30)
+                             """],
+                             close_fds=True,
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        # Wait for the interpreter to be completely initialized before
+        # sending any signal.
+        p.stdout.read(1)
+        getattr(p, method)(*args)
+        return p
+
+    def test_send_signal(self) -> None:
+        p = self._kill_process('send_signal', signal.SIGINT)
+        _, stderr = p.communicate()
+        self.assertIn(b'KeyboardInterrupt', stderr)
+        self.assertNotEqual(p.wait(), 0)
+
+    def test_kill(self) -> None:
+        p = self._kill_process('kill')
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        self.assertEqual(p.wait(), -signal.SIGKILL)
+
+    def test_terminate(self) -> None:
+        p = self._kill_process('terminate')
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        self.assertEqual(p.wait(), -signal.SIGTERM)
+
+    def check_close_std_fds(self, fds: Iterable[int]) -> None:
+        # Issue #9905: test that subprocess pipes still work properly with
+        # some standard fds closed
+        stdin = 0
+        newfds = []  # type: List[int]
+        for a in fds:
+            b = os.dup(a)
+            newfds.append(b)
+            if a == 0:
+                stdin = b
+        try:
+            for fd in fds:
+                os.close(fd)
+            out, err = subprocess.Popen([sys.executable, "-c",
+                              'import sys;'
+                              'sys.stdout.write("apple");'
+                              'sys.stdout.flush();'
+                              'sys.stderr.write("orange")'],
+                       stdin=stdin,
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE).communicate()
+            err = support.strip_python_stderr(err)
+            self.assertEqual((out, err), (b'apple', b'orange'))
+        finally:
+            for b, a in zip(newfds, fds):
+                os.dup2(b, a)
+            for b in newfds:
+                os.close(b)
+
+    def test_close_fd_0(self) -> None:
+        self.check_close_std_fds([0])
+
+    def test_close_fd_1(self) -> None:
+        self.check_close_std_fds([1])
+
+    def test_close_fd_2(self) -> None:
+        self.check_close_std_fds([2])
+
+    def test_close_fds_0_1(self) -> None:
+        self.check_close_std_fds([0, 1])
+
+    def test_close_fds_0_2(self) -> None:
+        self.check_close_std_fds([0, 2])
+
+    def test_close_fds_1_2(self) -> None:
+        self.check_close_std_fds([1, 2])
+
+    def test_close_fds_0_1_2(self) -> None:
+        # Issue #10806: test that subprocess pipes still work properly with
+        # all standard fds closed.
+        self.check_close_std_fds([0, 1, 2])
+
+    def test_remapping_std_fds(self) -> None:
+        # open up some temporary files
+        temps = [mkstemp() for i in range(3)]
+        try:
+            temp_fds = [fd for fd, fname in temps]
+
+            # unlink the files -- we won't need to reopen them
+            for fd, fname in temps:
+                os.unlink(fname)
+
+            # write some data to what will become stdin, and rewind
+            os.write(temp_fds[1], b"STDIN")
+            os.lseek(temp_fds[1], 0, 0)
+
+            # move the standard file descriptors out of the way
+            saved_fds = [os.dup(fd) for fd in range(3)]
+            try:
+                # duplicate the file objects over the standard fd's
+                for fd, temp_fd in enumerate(temp_fds):
+                    os.dup2(temp_fd, fd)
+
+                # now use those files in the "wrong" order, so that subprocess
+                # has to rearrange them in the child
+                p = subprocess.Popen([sys.executable, "-c",
+                    'import sys; got = sys.stdin.read();'
+                    'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+                    stdin=temp_fds[1],
+                    stdout=temp_fds[2],
+                    stderr=temp_fds[0])
+                p.wait()
+            finally:
+                # restore the original fd's underneath sys.stdin, etc.
+                for std, saved in enumerate(saved_fds):
+                    os.dup2(saved, std)
+                    os.close(saved)
+
+            for fd in temp_fds:
+                os.lseek(fd, 0, 0)
+
+            out = os.read(temp_fds[2], 1024)
+            err = support.strip_python_stderr(os.read(temp_fds[0], 1024))
+            self.assertEqual(out, b"got STDIN")
+            self.assertEqual(err, b"err")
+
+        finally:
+            for fd in temp_fds:
+                os.close(fd)
+
+    def check_swap_fds(self, stdin_no: int, stdout_no: int,
+                       stderr_no: int) -> None:
+        # open up some temporary files
+        temps = [mkstemp() for i in range(3)]
+        temp_fds = [fd for fd, fname in temps]
+        try:
+            # unlink the files -- we won't need to reopen them
+            for fd, fname in temps:
+                os.unlink(fname)
+
+            # save a copy of the standard file descriptors
+            saved_fds = [os.dup(fd) for fd in range(3)]
+            try:
+                # duplicate the temp files over the standard fd's 0, 1, 2
+                for fd, temp_fd in enumerate(temp_fds):
+                    os.dup2(temp_fd, fd)
+
+                # write some data to what will become stdin, and rewind
+                os.write(stdin_no, b"STDIN")
+                os.lseek(stdin_no, 0, 0)
+
+                # now use those files in the given order, so that subprocess
+                # has to rearrange them in the child
+                p = subprocess.Popen([sys.executable, "-c",
+                    'import sys; got = sys.stdin.read();'
+                    'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+                    stdin=stdin_no,
+                    stdout=stdout_no,
+                    stderr=stderr_no)
+                p.wait()
+
+                for fd in temp_fds:
+                    os.lseek(fd, 0, 0)
+
+                out = os.read(stdout_no, 1024)
+                err = support.strip_python_stderr(os.read(stderr_no, 1024))
+            finally:
+                for std, saved in enumerate(saved_fds):
+                    os.dup2(saved, std)
+                    os.close(saved)
+
+            self.assertEqual(out, b"got STDIN")
+            self.assertEqual(err, b"err")
+
+        finally:
+            for fd in temp_fds:
+                os.close(fd)
+
+    # When duping fds, if there arises a situation where one of the fds is
+    # either 0, 1 or 2, it is possible that it is overwritten (#12607).
+    # This tests all combinations of this.
+    def test_swap_fds(self) -> None:
+        self.check_swap_fds(0, 1, 2)
+        self.check_swap_fds(0, 2, 1)
+        self.check_swap_fds(1, 0, 2)
+        self.check_swap_fds(1, 2, 0)
+        self.check_swap_fds(2, 0, 1)
+        self.check_swap_fds(2, 1, 0)
+
+    def test_surrogates_error_message(self) -> None:
+        def prepare() -> None:
+            raise ValueError("surrogate:\uDCff")
+
+        try:
+            subprocess.call(
+                [sys.executable, "-c", "pass"],
+                preexec_fn=prepare)
+        except ValueError as err:
+            # Pure Python implementations keeps the message
+            self.assertIsNone(subprocess._posixsubprocess)
+            self.assertEqual(str(err), "surrogate:\uDCff")
+        except RuntimeError as err2:
+            # _posixsubprocess uses a default message
+            self.assertIsNotNone(subprocess._posixsubprocess)
+            self.assertEqual(str(err2), "Exception occurred in preexec_fn.")
+        else:
+            self.fail("Expected ValueError or RuntimeError")
+
+    def test_undecodable_env(self) -> None:
+        for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
+            # test str with surrogates
+            script = "import os; print(ascii(os.getenv(%s)))" % repr(key)
+            env = os.environ.copy()
+            env[key] = value
+            # Use C locale to get ascii for the locale encoding to force
+            # surrogate-escaping of \xFF in the child process; otherwise it can
+            # be decoded as-is if the default locale is latin-1.
+            env['LC_ALL'] = 'C'
+            stdout = subprocess.check_output(
+                [sys.executable, "-c", script],
+                env=env)
+            stdout = stdout.rstrip(b'\n\r')
+            self.assertEqual(stdout.decode('ascii'), ascii(value))
+
+            # test bytes
+            keyb = key.encode("ascii", "surrogateescape")
+            valueb = value.encode("ascii", "surrogateescape")
+            script = "import os; print(ascii(os.getenvb(%s)))" % repr(keyb)
+            envb = dict(os.environ.copy().items())  # type: Dict[Any, Any]
+            envb[keyb] = valueb
+            stdout = subprocess.check_output(
+                [sys.executable, "-c", script],
+                env=envb)
+            stdout = stdout.rstrip(b'\n\r')
+            self.assertEqual(stdout.decode('ascii'), ascii(valueb))
+
+    def test_bytes_program(self) -> None:
+        abs_program = os.fsencode(sys.executable)
+        path, programs = os.path.split(sys.executable)
+        program = os.fsencode(programs)
+
+        # absolute bytes path
+        exitcode = subprocess.call([abs_program, "-c", "pass"])
+        self.assertEqual(exitcode, 0)
+
+        # bytes program, unicode PATH
+        env = os.environ.copy()
+        env["PATH"] = path
+        exitcode = subprocess.call([program, "-c", "pass"], env=env)
+        self.assertEqual(exitcode, 0)
+
+        # bytes program, bytes PATH
+        envb = os.environb.copy()
+        envb[b"PATH"] = os.fsencode(path)
+        exitcode = subprocess.call([program, "-c", "pass"], env=envb)
+        self.assertEqual(exitcode, 0)
+
+    def test_pipe_cloexec(self) -> None:
+        sleeper = support.findfile("input_reader.py", subdir="subprocessdata")
+        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
+
+        p1 = subprocess.Popen([sys.executable, sleeper],
+                              stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE, close_fds=False)
+
+        self.addCleanup(p1.communicate, b'')
+
+        p2 = subprocess.Popen([sys.executable, fd_status],
+                              stdout=subprocess.PIPE, close_fds=False)
+
+        output, error = p2.communicate()
+        result_fds = set(map(int, output.split(b',')))
+        unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(),
+                            p1.stderr.fileno()])
+
+        self.assertFalse(result_fds & unwanted_fds,
+                         "Expected no fds from %r to be open in child, "
+                         "found %r" %
+                              (unwanted_fds, result_fds & unwanted_fds))
+
+    def test_pipe_cloexec_real_tools(self) -> None:
+        qcat = support.findfile("qcat.py", subdir="subprocessdata")
+        qgrep = support.findfile("qgrep.py", subdir="subprocessdata")
+
+        subdata = b'zxcvbn'
+        data = subdata * 4 + b'\n'
+
+        p1 = subprocess.Popen([sys.executable, qcat],
+                              stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                              close_fds=False)
+
+        p2 = subprocess.Popen([sys.executable, qgrep, subdata],
+                              stdin=p1.stdout, stdout=subprocess.PIPE,
+                              close_fds=False)
+
+        self.addCleanup(p1.wait)
+        self.addCleanup(p2.wait)
+        def kill_p1() -> None:
+            #try:
+            p1.terminate()
+            #except ProcessLookupError:
+            #    pass
+        def kill_p2() -> None:
+            #try:
+            p2.terminate()
+            #except ProcessLookupError:
+            #    pass
+        self.addCleanup(kill_p1)
+        self.addCleanup(kill_p2)
+
+        p1.stdin.write(data)
+        p1.stdin.close()
+
+        readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10)
+
+        self.assertTrue(readfiles, "The child hung")
+        self.assertEqual(p2.stdout.read(), data)
+
+        p1.stdout.close()
+        p2.stdout.close()
+
+    def test_close_fds(self) -> None:
+        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
+
+        fds = os.pipe()
+        self.addCleanup(os.close, fds[0])
+        self.addCleanup(os.close, fds[1])
+
+        open_fds = set([fds[0], fds[1]])
+        # add a bunch more fds
+        for _ in range(9):
+            fd = os.open("/dev/null", os.O_RDONLY)
+            self.addCleanup(os.close, fd)
+            open_fds.add(fd)
+
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=False)
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertEqual(remaining_fds & open_fds, open_fds,
+                         "Some fds were closed")
+
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=True)
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertFalse(remaining_fds & open_fds,
+                         "Some fds were left open")
+        self.assertIn(1, remaining_fds, "Subprocess failed")
+
+        # Keep some of the fd's we opened open in the subprocess.
+        # This tests _posixsubprocess.c's proper handling of fds_to_keep.
+        fds_to_keep = set(open_fds.pop() for _ in range(8))
+        p = subprocess.Popen([sys.executable, fd_status],
+                             stdout=subprocess.PIPE, close_fds=True,
+                             pass_fds=())
+        output, ignored = p.communicate()
+        remaining_fds = set(map(int, output.split(b',')))
+
+        self.assertFalse(remaining_fds & fds_to_keep & open_fds,
+                         "Some fds not in pass_fds were left open")
+        self.assertIn(1, remaining_fds, "Subprocess failed")
+
    # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file
    # descriptor of a pipe closed in the parent process is valid in the
    # child process according to fstat(), but the mode of the file
    # descriptor is invalid, and read or write raise an error.
    @support.requires_mac_ver(10, 5)
    def test_pass_fds(self) -> None:
        """pass_fds= keeps exactly the named descriptors open in the child.

        For each of ten pipe fds opened here, spawn a child that reports
        which fds it sees open and verify only the passed fd survived;
        also verify that pass_fds combined with close_fds=False warns.
        """
        fd_status = support.findfile("fd_status.py", subdir="subprocessdata")

        open_fds = set()  # type: Set[int]

        # Open five pipes (ten fds), all scheduled for cleanup.
        for x in range(5):
            fds = os.pipe()
            self.addCleanup(os.close, fds[0])
            self.addCleanup(os.close, fds[1])
            open_fds.update([fds[0], fds[1]])

        for fd in open_fds:
            # The child prints the fds it has open, comma-separated.
            p = subprocess.Popen([sys.executable, fd_status],
                                 stdout=subprocess.PIPE, close_fds=True,
                                 pass_fds=(fd, ))
            output, ignored = p.communicate()

            remaining_fds = set(map(int, output.split(b',')))
            to_be_closed = open_fds - {fd}

            self.assertIn(fd, remaining_fds, "fd to be passed not passed")
            self.assertFalse(remaining_fds & to_be_closed,
                             "fd to be closed passed")

            # pass_fds overrides close_fds with a warning.
            with self.assertWarns(RuntimeWarning) as context:
                self.assertFalse(subprocess.call(
                        [sys.executable, "-c", "import sys; sys.exit(0)"],
                        close_fds=False, pass_fds=(fd, )))
            self.assertIn('overriding close_fds', str(context.warning))
+
+    def test_stdout_stdin_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stdout=inout, stdin=inout)
+            p.wait()
+
+    def test_stdout_stderr_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stdout=inout, stderr=inout)
+            p.wait()
+
+    def test_stderr_stdin_are_single_inout_fd(self) -> None:
+        with io.open(os.devnull, "r+") as inout:
+            p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
+                                 stderr=inout, stdin=inout)
+            p.wait()
+
    def test_wait_when_sigchild_ignored(self) -> None:
        """Exit status must still be retrievable while SIGCHLD is ignored."""
        # NOTE: sigchild_ignore.py may not be an effective test on all OSes.
        sigchild_ignore = support.findfile("sigchild_ignore.py",
                                           subdir="subprocessdata")
        p = subprocess.Popen([sys.executable, sigchild_ignore],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        # The helper script reports its own failures on stderr and via a
        # non-zero exit code, so both are surfaced in the assertion message.
        self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
                         " non-zero with this error:\n%s" %
                         stderr.decode('utf8'))
+
    def test_select_unbuffered(self) -> None:
        """bufsize=0 must produce genuinely unbuffered, select()-able pipes."""
        # Issue #11459: bufsize=0 should really set the pipes as
        # unbuffered (and therefore let select() work properly).
        select = support.import_module("select")
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys;'
                              'sys.stdout.write("apple")'],
                             stdout=subprocess.PIPE,
                             bufsize=0)
        f = p.stdout
        self.addCleanup(f.close)
        try:
            # After a partial read, the remaining byte must be immediately
            # visible to select() -- a buffered pipe would hide it.
            self.assertEqual(f.read(4), b"appl")
            self.assertIn(f, select.select([f], [], [], 0.0)[0])
        finally:
            p.wait()
+
    def test_zombie_fast_process_del(self) -> None:
        """A Popen deleted before its child exits must land in _active."""
        # Issue #12650: on Unix, if Popen.__del__() was called before the
        # process exited, it wouldn't be added to subprocess._active, and would
        # remain a zombie.
        # spawn a Popen, and delete its reference before it exits
        p = subprocess.Popen([sys.executable, "-c",
                              'import sys, time;'
                              'time.sleep(0.2)'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        # Record identity/pid now; after `del p` only these survive.
        ident = id(p)
        pid = p.pid
        del p
        # check that p is in the active processes list
        self.assertIn(ident, [id(o) for o in subprocess._active])
+
    def test_leak_fast_process_del_killed(self) -> None:
        """A killed, already-deleted Popen must eventually be reaped."""
        # Issue #12650: on Unix, if Popen.__del__() was called before the
        # process exited, and the process got killed by a signal, it would never
        # be removed from subprocess._active, which triggered a FD and memory
        # leak.
        # spawn a Popen, delete its reference and kill it
        p = subprocess.Popen([sys.executable, "-c",
                              'import time;'
                              'time.sleep(3)'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        ident = id(p)
        pid = p.pid
        del p
        os.kill(pid, signal.SIGKILL)
        # check that p is in the active processes list
        self.assertIn(ident, [id(o) for o in subprocess._active])

        # let some time for the process to exit, and create a new Popen: this
        # should trigger the wait() of p
        time.sleep(0.2)
        with self.assertRaises(EnvironmentError) as c:
            with subprocess.Popen(['nonexisting_i_hope'],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE) as proc:
                pass
        # p should have been wait()ed on, and removed from the _active list
        self.assertRaises(OSError, os.waitpid, pid, 0)
        self.assertNotIn(ident, [id(o) for o in subprocess._active])
+
+
@unittest.skipUnless(mswindows, "Windows specific tests")
class Win32ProcessTestCase(BaseTestCase):
    """Windows-only Popen behaviour: startupinfo, creationflags, shell
    handling, string command lines, and the termination methods."""

    def test_startupinfo(self) -> None:
        # startupinfo argument
        # We use hardcoded constants because we do not want to
        # depend on win32all.
        STARTF_USESHOWWINDOW = 1
        SW_MAXIMIZE = 3
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags = STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = SW_MAXIMIZE
        # Since Python is a console process, it won't be affected
        # by wShowWindow, but the argument should be silently
        # ignored
        subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"],
                        startupinfo=startupinfo)

    def test_creationflags(self) -> None:
        # creationflags argument
        CREATE_NEW_CONSOLE = 16
        sys.stderr.write("    a DOS box should flash briefly ...\n")
        subprocess.call(sys.executable +
                        ' -c "import time; time.sleep(0.25)"',
                        creationflags=CREATE_NEW_CONSOLE)

    def test_invalid_args(self) -> None:
        # invalid arguments should raise ValueError: preexec_fn and
        # (close_fds=True with a redirected stdout) are rejected here.
        self.assertRaises(ValueError, subprocess.call,
                          [sys.executable, "-c",
                           "import sys; sys.exit(47)"],
                          preexec_fn=lambda: 1)
        self.assertRaises(ValueError, subprocess.call,
                          [sys.executable, "-c",
                           "import sys; sys.exit(47)"],
                          stdout=subprocess.PIPE,
                          close_fds=True)

    def test_close_fds(self) -> None:
        # close file descriptors: the child's exit code must still arrive
        rc = subprocess.call([sys.executable, "-c",
                              "import sys; sys.exit(47)"],
                              close_fds=True)
        self.assertEqual(rc, 47)

    def test_shell_sequence(self) -> None:
        # Run command through the shell (sequence); "set" prints the
        # environment, so the injected marker variable must appear.
        newenv = os.environ.copy()
        newenv["FRUIT"] = "physalis"
        p = subprocess.Popen(["set"], shell=1,
                             stdout=subprocess.PIPE,
                             env=newenv)
        self.addCleanup(p.stdout.close)
        self.assertIn(b"physalis", p.stdout.read())

    def test_shell_string(self) -> None:
        # Run command through the shell (string)
        newenv = os.environ.copy()
        newenv["FRUIT"] = "physalis"
        p = subprocess.Popen("set", shell=1,
                             stdout=subprocess.PIPE,
                             env=newenv)
        self.addCleanup(p.stdout.close)
        self.assertIn(b"physalis", p.stdout.read())

    def test_call_string(self) -> None:
        # call() function with string argument on Windows
        rc = subprocess.call(sys.executable +
                             ' -c "import sys; sys.exit(47)"')
        self.assertEqual(rc, 47)

    def _kill_process(self, method: str, *args: Any) -> None:
        """Start a long-sleeping child, call Popen.<method>(*args) on it,
        and check it died non-zero with nothing on stderr."""
        # Some win32 buildbot raises EOFError if stdin is inherited
        p = subprocess.Popen([sys.executable, "-c", """if 1:
                             import sys, time
                             sys.stdout.write('x\\n')
                             sys.stdout.flush()
                             time.sleep(30)
                             """],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        self.addCleanup(p.stdout.close)
        self.addCleanup(p.stderr.close)
        self.addCleanup(p.stdin.close)
        # Wait for the interpreter to be completely initialized before
        # sending any signal.
        p.stdout.read(1)
        getattr(p, method)(*args)
        _, stderr = p.communicate()
        self.assertStderrEqual(stderr, b'')
        returncode = p.wait()
        self.assertNotEqual(returncode, 0)

    def test_send_signal(self) -> None:
        self._kill_process('send_signal', signal.SIGTERM)

    def test_kill(self) -> None:
        self._kill_process('kill')

    def test_terminate(self) -> None:
        self._kill_process('terminate')
+
+
+# The module says:
+#   "NB This only works (and is only relevant) for UNIX."
+#
+# Actually, getoutput should work on any platform with an os.popen, but
+# I'll take the comment as given, and skip this suite.
+ at unittest.skipUnless(os.name == 'posix', "only relevant for UNIX")
+class CommandTests(unittest.TestCase):
+    def test_getoutput(self) -> None:
+        self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy')
+        self.assertEqual(subprocess.getstatusoutput('echo xyzzy'),
+                         (0, 'xyzzy'))
+
+        # we use mkdtemp in the next line to create an empty directory
+        # under our exclusive control; from that, we can invent a pathname
+        # that we _know_ won't exist.  This is guaranteed to fail.
+        dir = None # type: str
+        try:
+            dir = tempfile.mkdtemp()
+            name = os.path.join(dir, "foo")
+
+            status, output = subprocess.getstatusoutput('cat ' + name)
+            self.assertNotEqual(status, 0)
+        finally:
+            if dir is not None:
+                os.rmdir(dir)
+
+
@unittest.skipUnless(getattr(subprocess, '_has_poll', False),
                     "poll system call not supported")
class ProcessTestCaseNoPoll(ProcessTestCase):
    """Re-run ProcessTestCase with subprocess forced onto the non-poll path."""

    def setUp(self) -> None:
        # Pretend poll() is unavailable for the duration of the test.
        subprocess._has_poll = False
        super().setUp()

    def tearDown(self) -> None:
        # Restore the real capability flag, then run the base cleanup.
        subprocess._has_poll = True
        super().tearDown()
+
+
+#@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
+#                     "_posixsubprocess extension module not found.")
+#class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
+#    @classmethod
+#    def setUpClass(cls):
+#        global subprocess
+#        assert subprocess._posixsubprocess
+#        # Reimport subprocess while forcing _posixsubprocess to not exist.
+#        with support.check_warnings(('.*_posixsubprocess .* not being used.*',
+#                                     RuntimeWarning)):
+#            subprocess = support.import_fresh_module(
+#                    'subprocess', blocked=['_posixsubprocess'])
+#        assert not subprocess._posixsubprocess
+#
+#    @classmethod
+#    def tearDownClass(cls):
+#        global subprocess
+#        # Reimport subprocess as it should be, restoring order to the universe#.
+#        subprocess = support.import_fresh_module('subprocess')
+#        assert subprocess._posixsubprocess
+
+
class HelperFunctionTests(unittest.TestCase):
    """Tests for private helper functions of the subprocess module."""

    @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
    def test_eintr_retry_call(self) -> None:
        """_eintr_retry_call retries exactly the calls interrupted by EINTR."""
        calls = []  # type: List[Any]

        def flaky_os_func(*args: Any) -> tuple:
            # Record every invocation and fail the second one with EINTR
            # so the retry loop has something to recover from.
            calls.append(args)
            if len(calls) == 2:
                raise OSError(errno.EINTR, "fake interrupted system call")
            return tuple(reversed(args))

        self.assertEqual((999, 256),
                         subprocess._eintr_retry_call(flaky_os_func, 256, 999))
        self.assertEqual([(256, 999)], calls)
        # This time there will be an EINTR so it will loop once.
        self.assertEqual((666,),
                         subprocess._eintr_retry_call(flaky_os_func, 666))
        self.assertEqual([(256, 999), (666,), (666,)], calls)
+
+
@unittest.skipUnless(mswindows, "Windows-specific tests")
class CommandsWithSpaces (BaseTestCase):
    """Windows quoting: command paths and arguments containing spaces."""

    def setUp(self) -> None:
        super().setUp()
        # Create a helper script whose *path* contains a space; it prints
        # argc and the (lower-cased) argv it received.
        f, fname = mkstemp(".py", "te st")
        self.fname = fname.lower ()
        os.write(f, b"import sys;"
                    b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))"
        )
        os.close(f)

    def tearDown(self) -> None:
        os.remove(self.fname)
        super().tearDown()

    def with_spaces(self, *args: Any, **kwargs: Any) -> None:
        """Run the helper via Popen(*args, **kwargs) and check argv survived."""
        kwargs['stdout'] = subprocess.PIPE
        p = subprocess.Popen(*args, **kwargs)
        self.addCleanup(p.stdout.close)
        # The helper must see exactly two arguments: its own path and 'ab cd'.
        self.assertEqual(
          p.stdout.read ().decode("mbcs"),
          "2 [%r, 'ab cd']" % self.fname
        )

    def test_shell_string_with_spaces(self) -> None:
        # call() function with string argument with spaces on Windows
        self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
                                             "ab cd"), shell=1)

    def test_shell_sequence_with_spaces(self) -> None:
        # call() function with sequence argument with spaces on Windows
        self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1)

    def test_noshell_string_with_spaces(self) -> None:
        # call() function with string argument with spaces on Windows
        self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
                             "ab cd"))

    def test_noshell_sequence_with_spaces(self) -> None:
        # call() function with sequence argument with spaces on Windows
        self.with_spaces([sys.executable, self.fname, "ab cd"])
+
+
class ContextManagerTests(BaseTestCase):
    """Popen as a context manager: cleanup and error propagation."""

    def test_pipe(self) -> None:
        """Leaving the with-block closes both pipe ends."""
        code = ("import sys;"
                "sys.stdout.write('stdout');"
                "sys.stderr.write('stderr');")
        with subprocess.Popen([sys.executable, "-c", code],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE) as proc:
            self.assertEqual(proc.stdout.read(), b"stdout")
            self.assertStderrEqual(proc.stderr.read(), b"stderr")

        # __exit__ must have closed the pipes.
        self.assertTrue(proc.stdout.closed)
        self.assertTrue(proc.stderr.closed)

    def test_returncode(self) -> None:
        """__exit__ calls wait(), so returncode is populated afterwards."""
        with subprocess.Popen([sys.executable, "-c",
                               "import sys; sys.exit(100)"]) as proc:
            pass
        self.assertEqual(proc.returncode, 100)

    def test_communicate_stdin(self) -> None:
        """communicate() can feed stdin inside the context manager."""
        code = ("import sys;"
                "sys.exit(sys.stdin.read() == 'context')")
        with subprocess.Popen([sys.executable, "-c", code],
                              stdin=subprocess.PIPE) as proc:
            proc.communicate(b"context")
            # The child exits with True (== 1) when it saw the payload.
            self.assertEqual(proc.returncode, 1)

    def test_invalid_args(self) -> None:
        """A failing Popen propagates its exception out of the with-statement."""
        with self.assertRaises(EnvironmentError) as c:
            with subprocess.Popen(['nonexisting_i_hope'],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE) as proc:
                pass

            # Only reached if Popen unexpectedly succeeded: re-raise anything
            # other than the expected "no such file" error.
            if c.exception.errno != errno.ENOENT:  # ignore "no such file"
                raise c.exception
+
+
def test_main():
    """Run every test case in this module, then reap leftover children."""
    unit_tests = (
        ProcessTestCase,
        POSIXProcessTestCase,
        Win32ProcessTestCase,
        #ProcessTestCasePOSIXPurePython,
        CommandTests,
        ProcessTestCaseNoPoll,
        HelperFunctionTests,
        CommandsWithSpaces,
        ContextManagerTests,
    )

    support.run_unittest(*unit_tests)
    support.reap_children()
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test-data/stdlib-samples/3.2/test/test_tempfile.py b/test-data/stdlib-samples/3.2/test/test_tempfile.py
new file mode 100644
index 0000000..31b0fec
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_tempfile.py
@@ -0,0 +1,1122 @@
+# tempfile.py unit tests.
+import tempfile
+import os
+import signal
+import sys
+import re
+import warnings
+
+import unittest
+from test import support
+
+from typing import Any, AnyStr, List, Dict, IO
+
+
# Feature probes: individual tests bail out when the platform lacks these.
has_stat = 1 if hasattr(os, 'stat') else 0
if has_stat:
    import stat

has_textmode = tempfile._text_openflags != tempfile._bin_openflags
has_spawnl = hasattr(os, 'spawnl')

# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
TEST_FILES = 48 if sys.platform in ('openbsd3', 'openbsd4') else 100
+
+# This is organized as one test for each chunk of code in tempfile.py,
+# in order of their appearance in the file.  Testing which requires
+# threads is not done here.
+
+# Common functionality.
+class TC(unittest.TestCase):
+
+    str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")
+
+    def setUp(self) -> None:
+        self._warnings_manager = support.check_warnings()
+        self._warnings_manager.__enter__()
+        warnings.filterwarnings("ignore", category=RuntimeWarning,
+                                message="mktemp", module=__name__)
+
+    def tearDown(self) -> None:
+        self._warnings_manager.__exit__(None, None, None)
+
+
+    def failOnException(self, what: str, ei: tuple = None) -> None:
+        if ei is None:
+            ei = sys.exc_info()
+        self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))
+
+    def nameCheck(self, name: str, dir: str, pre: str, suf: str) -> None:
+        (ndir, nbase) = os.path.split(name)
+        npre  = nbase[:len(pre)]
+        nsuf  = nbase[len(nbase)-len(suf):]
+
+        # check for equality of the absolute paths!
+        self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir),
+                         "file '%s' not in directory '%s'" % (name, dir))
+        self.assertEqual(npre, pre,
+                         "file '%s' does not begin with '%s'" % (nbase, pre))
+        self.assertEqual(nsuf, suf,
+                         "file '%s' does not end with '%s'" % (nbase, suf))
+
+        nbase = nbase[len(pre):len(nbase)-len(suf)]
+        self.assertTrue(self.str_check.match(nbase),
+                     "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
+                     % nbase)
+
+test_classes = [] # type: List[type]
+
class test_exports(TC):
    def test_exports(self) -> None:
        # There are no surprising symbols in the tempfile module
        expected = {
            "NamedTemporaryFile" : 1,
            "TemporaryFile" : 1,
            "mkstemp" : 1,
            "mkdtemp" : 1,
            "mktemp" : 1,
            "TMP_MAX" : 1,
            "gettempprefix" : 1,
            "gettempdir" : 1,
            "tempdir" : 1,
            "template" : 1,
            "SpooledTemporaryFile" : 1,
            "TemporaryDirectory" : 1,
        }

        # Public names are those without a leading underscore.
        unexp = [key for key in tempfile.__dict__
                 if key[0] != '_' and key not in expected]
        self.assertTrue(len(unexp) == 0,
                        "unexpected keys: %s" % unexp)

test_classes.append(test_exports)
+
+
class test__RandomNameSequence(TC):
    """Test the internal iterator object _RandomNameSequence."""

    def setUp(self) -> None:
        self.r = tempfile._RandomNameSequence()
        super().setUp()

    def test_get_six_char_str(self) -> None:
        # _RandomNameSequence returns a six-character string
        s = next(self.r)
        self.nameCheck(s, '', '', '')

    def test_many(self) -> None:
        # _RandomNameSequence returns no duplicate strings (stochastic)

        dict = {}  # type: Dict[str, int]
        r = self.r
        for i in range(TEST_FILES):
            s = next(r)
            self.nameCheck(s, '', '', '')
            self.assertNotIn(s, dict)
            dict[s] = 1

    # NOTE(review): this method name lacks the "test_" prefix, so unittest
    # never runs it automatically -- confirm whether that is intentional.
    def supports_iter(self) -> None:
        # _RandomNameSequence supports the iterator protocol

        i = 0
        r = self.r
        try:
            # Twenty iterations are plenty to exercise __iter__/__next__.
            for s in r:
                i += 1
                if i == 20:
                    break
        except:
            self.failOnException("iteration")

    @unittest.skipUnless(hasattr(os, 'fork'),
        "os.fork is required for this test")
    def test_process_awareness(self) -> None:
        # ensure that the random source differs between
        # child and parent.
        read_fd, write_fd = os.pipe()
        pid = None # type: int
        try:
            pid = os.fork()
            if not pid:
                # Child: send one generated name back through the pipe.
                os.close(read_fd)
                os.write(write_fd, next(self.r).encode("ascii"))
                os.close(write_fd)
                # bypass the normal exit handlers- leave those to
                # the parent.
                os._exit(0)
            parent_value = next(self.r)
            child_value = os.read(read_fd, len(parent_value)).decode("ascii")
        finally:
            if pid:
                # best effort to ensure the process can't bleed out
                # via any bugs above
                try:
                    os.kill(pid, signal.SIGKILL)
                except EnvironmentError:
                    pass
            os.close(read_fd)
            os.close(write_fd)
        self.assertNotEqual(child_value, parent_value)


test_classes.append(test__RandomNameSequence)
+
+
class test__candidate_tempdir_list(TC):
    """Test the internal function _candidate_tempdir_list."""

    def test_nonempty_list(self) -> None:
        # _candidate_tempdir_list returns a nonempty list of strings
        candidates = tempfile._candidate_tempdir_list()

        self.assertFalse(len(candidates) == 0)
        for entry in candidates:
            self.assertIsInstance(entry, str)

    def test_wanted_dirs(self) -> None:
        # _candidate_tempdir_list contains the expected directories
        envnames = ('TMPDIR', 'TEMP', 'TMP')

        # Make sure the interesting environment variables are all set.
        with support.EnvironmentVarGuard() as env:
            for envname in envnames:
                if not os.getenv(envname):
                    env[envname] = os.path.abspath(envname)

            candidates = tempfile._candidate_tempdir_list()

            for envname in envnames:
                dirname = os.getenv(envname)
                if not dirname:
                    raise ValueError
                self.assertIn(dirname, candidates)

            try:
                dirname = os.getcwd()
            except (AttributeError, os.error):
                dirname = os.curdir

            self.assertIn(dirname, candidates)

            # Not practical to try to verify the presence of OS-specific
            # paths in this list.

test_classes.append(test__candidate_tempdir_list)
+
+
+# We test _get_default_tempdir by testing gettempdir.
+
+
class test__get_candidate_names(TC):
    """Test the internal function _get_candidate_names."""

    def test_retval(self) -> None:
        # _get_candidate_names returns a _RandomNameSequence object
        self.assertIsInstance(tempfile._get_candidate_names(),
                              tempfile._RandomNameSequence)

    def test_same_thing(self) -> None:
        # _get_candidate_names always returns the same (cached) object
        first = tempfile._get_candidate_names()
        second = tempfile._get_candidate_names()

        self.assertTrue(first is second)

test_classes.append(test__get_candidate_names)
+
+
class test__mkstemp_inner(TC):
    """Test the internal function _mkstemp_inner."""

    class mkstemped:
        # Thin wrapper around one file made by _mkstemp_inner: exposes
        # fd/name/write and removes the file when garbage-collected.
        _bflags = tempfile._bin_openflags
        _tflags = tempfile._text_openflags

        def __init__(self, dir: str, pre: str, suf: str, bin: int) -> None:
            if bin: flags = self._bflags
            else:   flags = self._tflags

            (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)

            # Bind the cleanup functions now: module globals may already be
            # torn down when __del__ runs during interpreter shutdown.
            self._close = os.close
            self._unlink = os.unlink

        def write(self, str: bytes) -> None:
            os.write(self.fd, str)

        def __del__(self) -> None:
            self._close(self.fd)
            self._unlink(self.name)

    def do_create(self, dir: str = None, pre: str = "", suf: str= "",
                  bin: int = 1) -> mkstemped:
        """Create a temp file via the wrapper and name-check the result."""
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = test__mkstemp_inner.mkstemped(dir, pre, suf, bin) # see #259
        except:
            self.failOnException("_mkstemp_inner")

        self.nameCheck(file.name, dir, pre, suf)
        return file

    def test_basic(self) -> None:
        # _mkstemp_inner can create files
        self.do_create().write(b"blat")
        self.do_create(pre="a").write(b"blat")
        self.do_create(suf="b").write(b"blat")
        self.do_create(pre="a", suf="b").write(b"blat")
        self.do_create(pre="aa", suf=".txt").write(b"blat")

    def test_basic_many(self) -> None:
        # _mkstemp_inner can create many files (stochastic)
        # Keep every wrapper alive until the end so names stay claimed.
        extant = list(range(TEST_FILES))  # type: List[Any]
        for i in extant:
            extant[i] = self.do_create(pre="aa")

    def test_choose_directory(self) -> None:
        # _mkstemp_inner can create files in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            self.do_create(dir=dir).write(b"blat")
        finally:
            os.rmdir(dir)

    def test_file_mode(self) -> None:
        # _mkstemp_inner creates files with the proper mode
        if not has_stat:
            return            # ugh, can't use SkipTest.

        file = self.do_create()
        mode = stat.S_IMODE(os.stat(file.name).st_mode)
        expected = 0o600
        if sys.platform in ('win32', 'os2emx'):
            # There's no distinction among 'user', 'group' and 'world';
            # replicate the 'user' bits.
            user = expected >> 6
            expected = user * (1 + 8 + 64)
        self.assertEqual(mode, expected)

    def test_noinherit(self) -> None:
        # _mkstemp_inner file handles are not inherited by child processes
        if not has_spawnl:
            return            # ugh, can't use SkipTest.

        if support.verbose:
            v="v"
        else:
            v="q"

        file = self.do_create()
        fd = "%d" % file.fd

        try:
            me = __file__ # type: str
        except NameError:
            me = sys.argv[0]

        # We have to exec something, so that FD_CLOEXEC will take
        # effect.  The core of this test is therefore in
        # tf_inherit_check.py, which see.
        tester = os.path.join(os.path.dirname(os.path.abspath(me)),
                              "tf_inherit_check.py")

        # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
        # but an arg with embedded spaces should be decorated with double
        # quotes on each end
        if sys.platform in ('win32',):
            decorated = '"%s"' % sys.executable
            tester = '"%s"' % tester
        else:
            decorated = sys.executable

        retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
        self.assertFalse(retval < 0,
                    "child process caught fatal signal %d" % -retval)
        self.assertFalse(retval > 0, "child process reports failure %d"%retval)

    def test_textmode(self) -> None:
        # _mkstemp_inner can create files in text mode
        if not has_textmode:
            return            # ugh, can't use SkipTest.

        # A text file is truncated at the first Ctrl+Z byte
        f = self.do_create(bin=0)
        f.write(b"blat\x1a")
        f.write(b"extra\n")
        os.lseek(f.fd, 0, os.SEEK_SET)
        self.assertEqual(os.read(f.fd, 20), b"blat")

test_classes.append(test__mkstemp_inner)
+
+
class test_gettempprefix(TC):
    """Test gettempprefix()."""

    def test_sane_template(self) -> None:
        # gettempprefix returns a nonempty prefix string
        prefix = tempfile.gettempprefix()

        self.assertIsInstance(prefix, str)
        self.assertTrue(len(prefix) > 0)

    def test_usable_template(self) -> None:
        # gettempprefix returns a usable prefix string

        # Create a temp directory, avoiding use of the prefix.
        # Then attempt to create a file whose name is
        # prefix + 'xxxxxx.xxx' in that directory.
        fname = tempfile.gettempprefix() + "xxxxxx.xxx"
        tmpdir = tempfile.mkdtemp(prefix="")
        try:
            path = os.path.join(tmpdir, fname)
            try:
                fd = os.open(path, os.O_RDWR | os.O_CREAT)
            except:
                self.failOnException("os.open")
            os.close(fd)
            os.unlink(path)
        finally:
            os.rmdir(tmpdir)

test_classes.append(test_gettempprefix)
+
+
class test_gettempdir(TC):
    """Test gettempdir()."""

    def test_directory_exists(self) -> None:
        # gettempdir returns a directory which exists
        tmpdir = tempfile.gettempdir()
        self.assertTrue(os.path.isabs(tmpdir) or tmpdir == os.curdir,
                     "%s is not an absolute path" % tmpdir)
        self.assertTrue(os.path.isdir(tmpdir),
                     "%s is not a directory" % tmpdir)

    def test_directory_writable(self) -> None:
        # gettempdir returns a directory writable by the user

        # sneaky: just instantiate a NamedTemporaryFile, which
        # defaults to writing into the directory returned by
        # gettempdir.
        try:
            handle = tempfile.NamedTemporaryFile()
            handle.write(b"blat")
            handle.close()
        except:
            self.failOnException("create file in %s" % tempfile.gettempdir())

    def test_same_thing(self) -> None:
        # gettempdir always returns the same object
        first = tempfile.gettempdir()
        second = tempfile.gettempdir()

        self.assertTrue(first is second)

test_classes.append(test_gettempdir)
+
+
class test_mkstemp(TC):
    """Test mkstemp()."""

    def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> None:
        """Create one temp file via mkstemp, name-check it, then remove it."""
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
            (ndir, nbase) = os.path.split(name)
            adir = os.path.abspath(dir)
            self.assertEqual(adir, ndir,
                "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
        except:
            self.failOnException("mkstemp")

        try:
            self.nameCheck(name, dir, pre, suf)
        finally:
            os.close(fd)
            os.unlink(name)

    def test_basic(self) -> None:
        # mkstemp can create files
        for kwargs in ({}, {"pre": "a"}, {"suf": "b"},
                       {"pre": "a", "suf": "b"},
                       {"pre": "aa", "suf": ".txt"},
                       {"dir": "."}):
            self.do_create(**kwargs)

    def test_choose_directory(self) -> None:
        # mkstemp can create directories in a user-selected directory
        tmpdir = tempfile.mkdtemp()
        try:
            self.do_create(dir=tmpdir)
        finally:
            os.rmdir(tmpdir)

test_classes.append(test_mkstemp)
+
+
+class test_mkdtemp(TC):
+    """Test mkdtemp()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "") -> str:
+        # Create a directory via mkdtemp(), verify its name, and return it;
+        # the CALLER is responsible for removing the returned directory.
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
+        except:
+            # failOnException (defined on TC) is expected to raise, so 'name'
+            # is always bound below -- TODO(review): confirm against TC.
+            self.failOnException("mkdtemp")
+
+        try:
+            self.nameCheck(name, dir, pre, suf)
+            return name
+        except:
+            # Name check failed: remove the directory before re-raising.
+            os.rmdir(name)
+            raise
+
+    def test_basic(self) -> None:
+        # mkdtemp can create directories
+        os.rmdir(self.do_create())
+        os.rmdir(self.do_create(pre="a"))
+        os.rmdir(self.do_create(suf="b"))
+        os.rmdir(self.do_create(pre="a", suf="b"))
+        os.rmdir(self.do_create(pre="aa", suf=".txt"))
+
+    def test_basic_many(self) -> None:
+        # mkdtemp can create many directories (stochastic)
+        extant = list(range(TEST_FILES))  # type: List[Any]
+        try:
+            for i in extant:
+                extant[i] = self.do_create(pre="aa")
+        finally:
+            # Entries still holding their original int mean do_create never
+            # reached them; only created names (str) map to real directories.
+            for i in extant:
+                if(isinstance(i, str)):
+                    os.rmdir(i)
+
+    def test_choose_directory(self) -> None:
+        # mkdtemp can create directories in a user-selected directory
+        dir = tempfile.mkdtemp()
+        try:
+            os.rmdir(self.do_create(dir=dir))
+        finally:
+            os.rmdir(dir)
+
+    def test_mode(self) -> None:
+        # mkdtemp creates directories with the proper mode
+        if not has_stat:
+            return            # ugh, can't use SkipTest.
+
+        dir = self.do_create()
+        try:
+            mode = stat.S_IMODE(os.stat(dir).st_mode)
+            mode &= 0o777 # Mask off sticky bits inherited from /tmp
+            expected = 0o700
+            if sys.platform in ('win32', 'os2emx'):
+                # There's no distinction among 'user', 'group' and 'world';
+                # replicate the 'user' bits.
+                user = expected >> 6
+                expected = user * (1 + 8 + 64)
+            self.assertEqual(mode, expected)
+        finally:
+            os.rmdir(dir)
+
+test_classes.append(test_mkdtemp)
+
+
+class test_mktemp(TC):
+    """Test mktemp()."""
+
+    # For safety, all use of mktemp must occur in a private directory.
+    # We must also suppress the RuntimeWarning it generates.
+    def setUp(self) -> None:
+        self.dir = tempfile.mkdtemp()
+        super().setUp()
+
+    def tearDown(self) -> None:
+        if self.dir:
+            os.rmdir(self.dir)
+            self.dir = None
+        super().tearDown()
+
+    class mktemped:
+        # Helper that creates a file at a mktemp()-chosen name and removes
+        # it again when the instance is garbage collected.
+        def _unlink(self, path: str) -> None:
+            os.unlink(path)
+
+        _bflags = tempfile._bin_openflags
+
+        def __init__(self, dir: str, pre: str, suf: str) -> None:
+            self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
+            # Create the file.  This will raise an exception if it's
+            # mysteriously appeared in the meanwhile.
+            os.close(os.open(self.name, self._bflags, 0o600))
+
+        def __del__(self) -> None:
+            self._unlink(self.name)
+
+    def do_create(self, pre: str = "", suf: str = "") -> mktemped:
+        try:
+            file = test_mktemp.mktemped(self.dir, pre, suf) # see #259
+        except:
+            self.failOnException("mktemp")
+
+        self.nameCheck(file.name, self.dir, pre, suf)
+        # Caller must keep the returned object alive; its __del__ unlinks
+        # the file.
+        return file
+
+    def test_basic(self) -> None:
+        # mktemp can choose usable file names
+        self.do_create()
+        self.do_create(pre="a")
+        self.do_create(suf="b")
+        self.do_create(pre="a", suf="b")
+        self.do_create(pre="aa", suf=".txt")
+
+    def test_many(self) -> None:
+        # mktemp can choose many usable file names (stochastic)
+        extant = list(range(TEST_FILES))  # type: List[Any]
+        for i in extant:
+            extant[i] = self.do_create(pre="aa")
+
+##     def test_warning(self):
+##         # mktemp issues a warning when used
+##         warnings.filterwarnings("error",
+##                                 category=RuntimeWarning,
+##                                 message="mktemp")
+##         self.assertRaises(RuntimeWarning,
+##                           tempfile.mktemp, dir=self.dir)
+
+test_classes.append(test_mktemp)
+
+
+# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
+
+
+class test_NamedTemporaryFile(TC):
+    """Test NamedTemporaryFile()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "",
+                  delete: bool = True) -> IO[Any]:
+        # Create a NamedTemporaryFile, verify its name, and return the open
+        # file object (delete-on-close unless delete=False).
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
+                                               delete=delete)
+        except:
+            self.failOnException("NamedTemporaryFile")
+
+        self.nameCheck(file.name, dir, pre, suf)
+        return file
+
+
+    def test_basic(self) -> None:
+        # NamedTemporaryFile can create files
+        self.do_create()
+        self.do_create(pre="a")
+        self.do_create(suf="b")
+        self.do_create(pre="a", suf="b")
+        self.do_create(pre="aa", suf=".txt")
+
+    def test_creates_named(self) -> None:
+        # NamedTemporaryFile creates files with names
+        f = tempfile.NamedTemporaryFile()
+        self.assertTrue(os.path.exists(f.name),
+                        "NamedTemporaryFile %s does not exist" % f.name)
+
+    def test_del_on_close(self) -> None:
+        # A NamedTemporaryFile is deleted when closed
+        dir = tempfile.mkdtemp()
+        try:
+            f = tempfile.NamedTemporaryFile(dir=dir)
+            f.write(b'blat')
+            f.close()
+            self.assertFalse(os.path.exists(f.name),
+                        "NamedTemporaryFile %s exists after close" % f.name)
+        finally:
+            os.rmdir(dir)
+
+    def test_dis_del_on_close(self) -> None:
+        # Tests that delete-on-close can be disabled
+        dir = tempfile.mkdtemp()
+        # NOTE(review): annotated 'str' but initialized to None -- relies on
+        # implicit-Optional typing.
+        tmp = None # type: str
+        try:
+            f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
+            tmp = f.name
+            f.write(b'blat')
+            f.close()
+            self.assertTrue(os.path.exists(f.name),
+                        "NamedTemporaryFile %s missing after close" % f.name)
+        finally:
+            if tmp is not None:
+                os.unlink(tmp)
+            os.rmdir(dir)
+
+    def test_multiple_close(self) -> None:
+        # A NamedTemporaryFile can be closed many times without error
+        f = tempfile.NamedTemporaryFile()
+        f.write(b'abc\n')
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_context_manager(self) -> None:
+        # A NamedTemporaryFile can be used as a context manager
+        with tempfile.NamedTemporaryFile() as f:
+            self.assertTrue(os.path.exists(f.name))
+        self.assertFalse(os.path.exists(f.name))
+        # Re-entering the context of a closed file must raise ValueError.
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    # How to test the mode and bufsize parameters?
+
+test_classes.append(test_NamedTemporaryFile)
+
+class test_SpooledTemporaryFile(TC):
+    """Test SpooledTemporaryFile()."""
+
+    def do_create(self, max_size: int = 0, dir: str = None, pre: str = "",
+                  suf: str = "") -> tempfile.SpooledTemporaryFile:
+        # Create and return a SpooledTemporaryFile; data is spooled in memory
+        # until it exceeds max_size, then rolled over to a real file.
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
+        except:
+            self.failOnException("SpooledTemporaryFile")
+
+        return file
+
+
+    def test_basic(self) -> None:
+        # SpooledTemporaryFile can create files
+        # (_rolled is the private flag: True once the in-memory buffer has
+        # been rolled over to a real on-disk file)
+        f = self.do_create()
+        self.assertFalse(f._rolled)
+        f = self.do_create(max_size=100, pre="a", suf=".txt")
+        self.assertFalse(f._rolled)
+
+    def test_del_on_close(self) -> None:
+        # A SpooledTemporaryFile is deleted when closed
+        dir = tempfile.mkdtemp()
+        try:
+            f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
+            self.assertFalse(f._rolled)
+            f.write(b'blat ' * 5)
+            self.assertTrue(f._rolled)
+            filename = f.name
+            f.close()
+            # f.name may be an integer fd rather than a path, hence the
+            # isinstance guard before os.path.exists.
+            self.assertFalse(isinstance(filename, str) and os.path.exists(filename),
+                        "SpooledTemporaryFile %s exists after close" % filename)
+        finally:
+            os.rmdir(dir)
+
+    def test_rewrite_small(self) -> None:
+        # A SpooledTemporaryFile can be written to multiple within the max_size
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        for i in range(5):
+            f.seek(0, 0)
+            f.write(b'x' * 20)
+        self.assertFalse(f._rolled)
+
+    def test_write_sequential(self) -> None:
+        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
+        # over afterward
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        f.write(b'x' * 20)
+        self.assertFalse(f._rolled)
+        f.write(b'x' * 10)
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_writelines(self) -> None:
+        # Verify writelines with a SpooledTemporaryFile
+        f = self.do_create()
+        f.writelines([b'x', b'y', b'z'])
+        f.seek(0)
+        buf = f.read()
+        self.assertEqual(buf, b'xyz')
+
+    def test_writelines_sequential(self) -> None:
+        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
+        # over afterward
+        f = self.do_create(max_size=35)
+        f.writelines([b'x' * 20, b'x' * 10, b'x' * 5])
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_sparse(self) -> None:
+        # A SpooledTemporaryFile that is written late in the file will extend
+        # when that occurs
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        f.seek(100, 0)
+        self.assertFalse(f._rolled)
+        f.write(b'x')
+        self.assertTrue(f._rolled)
+
+    def test_fileno(self) -> None:
+        # A SpooledTemporaryFile should roll over to a real file on fileno()
+        f = self.do_create(max_size=30)
+        self.assertFalse(f._rolled)
+        self.assertTrue(f.fileno() > 0)
+        self.assertTrue(f._rolled)
+
+    def test_multiple_close_before_rollover(self) -> None:
+        # A SpooledTemporaryFile can be closed many times without error
+        f = tempfile.SpooledTemporaryFile()
+        f.write(b'abc\n')
+        self.assertFalse(f._rolled)
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_multiple_close_after_rollover(self) -> None:
+        # A SpooledTemporaryFile can be closed many times without error
+        f = tempfile.SpooledTemporaryFile(max_size=1)
+        f.write(b'abc\n')
+        self.assertTrue(f._rolled)
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    def test_bound_methods(self) -> None:
+        # It should be OK to steal a bound method from a SpooledTemporaryFile
+        # and use it independently; when the file rolls over, those bound
+        # methods should continue to function
+        f = self.do_create(max_size=30)
+        read = f.read
+        write = f.write
+        seek = f.seek
+
+        write(b"a" * 35)
+        write(b"b" * 35)
+        seek(0, 0)
+        self.assertEqual(read(70), b'a'*35 + b'b'*35)
+
+    def test_text_mode(self) -> None:
+        # Creating a SpooledTemporaryFile with a text mode should produce
+        # a file object reading and writing (Unicode) text strings.
+        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10)
+        f.write("abc\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\n")
+        f.write("def\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\n")
+        f.write("xyzzy\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\nxyzzy\n")
+        # Check that Ctrl+Z doesn't truncate the file
+        f.write("foo\x1abar\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n")
+
+    def test_text_newline_and_encoding(self) -> None:
+        # newline='' preserves \r\n verbatim; encoding applies after rollover.
+        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10,
+                                          newline='', encoding='utf-8')
+        f.write("\u039B\r\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "\u039B\r\n")
+        self.assertFalse(f._rolled)
+
+        f.write("\u039B" * 20 + "\r\n")
+        f.seek(0)
+        self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n")
+        self.assertTrue(f._rolled)
+
+    def test_context_manager_before_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        with tempfile.SpooledTemporaryFile(max_size=1) as f:
+            self.assertFalse(f._rolled)
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    def test_context_manager_during_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        with tempfile.SpooledTemporaryFile(max_size=1) as f:
+            self.assertFalse(f._rolled)
+            f.write(b'abc\n')
+            f.flush()
+            self.assertTrue(f._rolled)
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+    def test_context_manager_after_rollover(self) -> None:
+        # A SpooledTemporaryFile can be used as a context manager
+        f = tempfile.SpooledTemporaryFile(max_size=1)
+        f.write(b'abc\n')
+        f.flush()
+        self.assertTrue(f._rolled)
+        with f:
+            self.assertFalse(f.closed)
+        self.assertTrue(f.closed)
+        def use_closed():
+            with f:
+                pass
+        self.assertRaises(ValueError, use_closed)
+
+
+test_classes.append(test_SpooledTemporaryFile)
+
+
+class test_TemporaryFile(TC):
+    """Test TemporaryFile()."""
+
+    def test_basic(self) -> None:
+        # TemporaryFile can create files
+        # No point in testing the name params - the file has no name.
+        try:
+            tempfile.TemporaryFile()
+        except:
+            self.failOnException("TemporaryFile")
+
+    def test_has_no_name(self) -> None:
+        # TemporaryFile creates files with no names (on this system)
+        dir = tempfile.mkdtemp()
+        f = tempfile.TemporaryFile(dir=dir)
+        f.write(b'blat')
+
+        # Sneaky: because this file has no name, it should not prevent
+        # us from removing the directory it was created in.
+        try:
+            os.rmdir(dir)
+        except:
+            ei = sys.exc_info()
+            # cleanup
+            f.close()
+            os.rmdir(dir)
+            self.failOnException("rmdir", ei)
+
+    def test_multiple_close(self) -> None:
+        # A TemporaryFile can be closed many times without error
+        f = tempfile.TemporaryFile()
+        f.write(b'abc\n')
+        f.close()
+        try:
+            f.close()
+            f.close()
+        except:
+            self.failOnException("close")
+
+    # How to test the mode and bufsize parameters?
+    def test_mode_and_encoding(self) -> None:
+
+        # Write 'input' with the given open args, read it back, and check
+        # the round trip is lossless.
+        def roundtrip(input: AnyStr, *args: Any, **kwargs: Any) -> None:
+            with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
+                fileobj.write(input)
+                fileobj.seek(0)
+                self.assertEqual(input, fileobj.read())
+
+        roundtrip(b"1234", "w+b")
+        roundtrip("abdc\n", "w+")
+        roundtrip("\u039B", "w+", encoding="utf-16")
+        roundtrip("foo\r\n", "w+", newline="")
+
+
+# NOTE(review): presumably TemporaryFile aliases NamedTemporaryFile on some
+# platforms, which would make these tests duplicates there -- confirm.
+if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
+    test_classes.append(test_TemporaryFile)
+
+
+# Helper for test_del_on_shutdown
+class NulledModules:
+    # Context manager that temporarily replaces every attribute of the given
+    # modules with None (mimicking module teardown at interpreter shutdown)
+    # and restores the saved contents on exit.
+    def __init__(self, *modules: Any) -> None:
+        self.refs = [mod.__dict__ for mod in modules]
+        self.contents = [ref.copy() for ref in self.refs]
+
+    def __enter__(self) -> None:
+        for d in self.refs:
+            for key in d:
+                d[key] = None
+
+    def __exit__(self, *exc_info: Any) -> None:
+        # Restore each module dict from the snapshot taken in __init__.
+        for d, c in zip(self.refs, self.contents):
+            d.clear()
+            d.update(c)
+
+class test_TemporaryDirectory(TC):
+    """Test TemporaryDirectory()."""
+
+    def do_create(self, dir: str = None, pre: str = "", suf: str = "",
+                  recurse: int = 1) -> tempfile.TemporaryDirectory:
+        # Create a TemporaryDirectory, verify its name, and populate it with
+        # a file and (when recurse > 0) a nested temporary directory.
+        if dir is None:
+            dir = tempfile.gettempdir()
+        try:
+            tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
+        except:
+            self.failOnException("TemporaryDirectory")
+        self.nameCheck(tmp.name, dir, pre, suf)
+        # Create a subdirectory and some files
+        if recurse:
+            self.do_create(tmp.name, pre, suf, recurse-1)
+        with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
+            f.write(b"Hello world!")
+        return tmp
+
+    def test_mkdtemp_failure(self) -> None:
+        # Check no additional exception if mkdtemp fails
+        # Previously would raise AttributeError instead
+        # (noted as part of Issue #10188)
+        # The with-block cleans the directory up, so 'nonexistent' names a
+        # path that no longer exists afterwards.
+        with tempfile.TemporaryDirectory() as nonexistent:
+            pass
+        with self.assertRaises(os.error):
+            tempfile.TemporaryDirectory(dir=nonexistent)
+
+    def test_explicit_cleanup(self) -> None:
+        # A TemporaryDirectory is deleted when cleaned up
+        dir = tempfile.mkdtemp()
+        try:
+            d = self.do_create(dir=dir)
+            self.assertTrue(os.path.exists(d.name),
+                            "TemporaryDirectory %s does not exist" % d.name)
+            d.cleanup()
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after cleanup" % d.name)
+        finally:
+            os.rmdir(dir)
+
+    @support.skip_unless_symlink
+    def test_cleanup_with_symlink_to_a_directory(self) -> None:
+        # cleanup() should not follow symlinks to directories (issue #12464)
+        d1 = self.do_create()
+        d2 = self.do_create()
+
+        # Symlink d1/foo -> d2
+        os.symlink(d2.name, os.path.join(d1.name, "foo"))
+
+        # This call to cleanup() should not follow the "foo" symlink
+        d1.cleanup()
+
+        self.assertFalse(os.path.exists(d1.name),
+                         "TemporaryDirectory %s exists after cleanup" % d1.name)
+        self.assertTrue(os.path.exists(d2.name),
+                        "Directory pointed to by a symlink was deleted")
+        self.assertEqual(os.listdir(d2.name), ['test.txt'],
+                         "Contents of the directory pointed to by a symlink "
+                         "were deleted")
+        d2.cleanup()
+
+    @support.cpython_only
+    def test_del_on_collection(self) -> None:
+        # A TemporaryDirectory is deleted when garbage collected
+        dir = tempfile.mkdtemp()
+        try:
+            d = self.do_create(dir=dir)
+            name = d.name
+            del d # Rely on refcounting to invoke __del__
+            self.assertFalse(os.path.exists(name),
+                        "TemporaryDirectory %s exists after __del__" % name)
+        finally:
+            os.rmdir(dir)
+
+    @unittest.expectedFailure # See issue #10188
+    def test_del_on_shutdown(self) -> None:
+        # A TemporaryDirectory may be cleaned up during shutdown
+        # Make sure it works with the relevant modules nulled out
+        with self.do_create() as dir:
+            d = self.do_create(dir=dir)
+            # Mimic the nulling out of modules that
+            # occurs during system shutdown
+            modules = [os, os.path]
+            if has_stat:
+                modules.append(stat)
+            # Currently broken, so suppress the warning
+            # that is otherwise emitted on stdout
+            with support.captured_stderr() as err:
+                with NulledModules(*modules):
+                    d.cleanup()
+            # Currently broken, so stop spurious exception by
+            # indicating the object has already been closed
+            d._closed = True
+            # And this assert will fail, as expected by the
+            # unittest decorator...
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after cleanup" % d.name)
+
+    def test_warnings_on_cleanup(self) -> None:
+        # Two kinds of warning on shutdown
+        #   Issue 10888: may write to stderr if modules are nulled out
+        #   ResourceWarning will be triggered by __del__
+        with self.do_create() as dir:
+            if os.sep != '\\':
+                # Embed a backslash in order to make sure string escaping
+                # in the displayed error message is dealt with correctly
+                suffix = '\\check_backslash_handling'
+            else:
+                suffix = ''
+            d = self.do_create(dir=dir, suf=suffix)
+
+            #Check for the Issue 10888 message
+            modules = [os, os.path]
+            if has_stat:
+                modules.append(stat)
+            with support.captured_stderr() as err:
+                with NulledModules(*modules):
+                    d.cleanup()
+            # Undo the doubled backslashes so the assertion matches the
+            # suffix as originally spelled.
+            message = err.getvalue().replace('\\\\', '\\')
+            self.assertIn("while cleaning up",  message)
+            self.assertIn(d.name,  message)
+
+            # Check for the resource warning
+            with support.check_warnings(('Implicitly', ResourceWarning), quiet=False):
+                warnings.filterwarnings("always", category=ResourceWarning)
+                d.__del__()
+            self.assertFalse(os.path.exists(d.name),
+                        "TemporaryDirectory %s exists after __del__" % d.name)
+
+    def test_multiple_close(self) -> None:
+        # Can be cleaned-up many times without error
+        d = self.do_create()
+        d.cleanup()
+        try:
+            d.cleanup()
+            d.cleanup()
+        except:
+            self.failOnException("cleanup")
+
+    def test_context_manager(self) -> None:
+        # Can be used as a context manager
+        # (__enter__ yields the directory's path, exiting removes it)
+        d = self.do_create()
+        with d as name:
+            self.assertTrue(os.path.exists(name))
+            self.assertEqual(name, d.name)
+        self.assertFalse(os.path.exists(name))
+
+
+test_classes.append(test_TemporaryDirectory)
+
+def test_main() -> None:
+    # Run every test class registered in test_classes above.
+    support.run_unittest(*test_classes)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/test_textwrap.py b/test-data/stdlib-samples/3.2/test/test_textwrap.py
new file mode 100644
index 0000000..79d921a
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/test_textwrap.py
@@ -0,0 +1,601 @@
+#
+# Test suite for the textwrap module.
+#
+# Original tests written by Greg Ward <gward at python.net>.
+# Converted to PyUnit by Peter Hansen <peter at engcorp.com>.
+# Currently maintained by Greg Ward.
+#
+# $Id$
+#
+
+import unittest
+from test import support
+
+from typing import Any, List, Sequence
+
+from textwrap import TextWrapper, wrap, fill, dedent
+
+
+class BaseTestCase(unittest.TestCase):
+    '''Parent class with utility methods for textwrap tests.'''
+
+    # Set by subclasses' setUp(); used by check_split().
+    wrapper = None  # type: TextWrapper
+
+    def show(self, textin: Sequence[str]) -> str:
+        # Render either a list of lines or a single string for use in
+        # assertion failure messages.
+        # NOTE(review): 'result' is unbound (UnboundLocalError) if textin is
+        # neither a list nor a str.
+        if isinstance(textin, list):
+            results = []  # type: List[str]
+            for i in range(len(textin)):
+                results.append("  %d: %r" % (i, textin[i]))
+            result = '\n'.join(results)
+        elif isinstance(textin, str):
+            result = "  %s\n" % repr(textin)
+        return result
+
+
+    def check(self, result: Sequence[str], expect: Sequence[str]) -> None:
+        # Assert equality with a readable dump of both values on failure.
+        self.assertEqual(result, expect,
+            'expected:\n%s\nbut got:\n%s' % (
+                self.show(expect), self.show(result)))
+
+    def check_wrap(self, text: str, width: int, expect: Sequence[str],
+                   **kwargs: Any) -> None:
+        # Wrap 'text' at 'width' (extra kwargs go to textwrap.wrap) and
+        # compare against the expected line list.
+        result = wrap(text, width, **kwargs)
+        self.check(result, expect)
+
+    def check_split(self, text: str, expect: Sequence[str]) -> None:
+        # Probe TextWrapper's private _split() chunking directly.
+        result = self.wrapper._split(text)
+        self.assertEqual(result, expect,
+                         "\nexpected %r\n"
+                         "but got  %r" % (expect, result))
+
+
+class WrapTestCase(BaseTestCase):
+
+    def setUp(self) -> None:
+        # Fresh 45-column wrapper for each test; check_split() uses it.
+        self.wrapper = TextWrapper(width=45)
+
+    def test_simple(self) -> None:
+        # Simple case: just words, spaces, and a bit of punctuation
+        # (same text wrapped at three widths, including one wide enough
+        # to leave it on a single line)
+
+        text = "Hello there, how are you this fine day?  I'm glad to hear it!"
+
+        self.check_wrap(text, 12,
+                        ["Hello there,",
+                         "how are you",
+                         "this fine",
+                         "day?  I'm",
+                         "glad to hear",
+                         "it!"])
+        self.check_wrap(text, 42,
+                        ["Hello there, how are you this fine day?",
+                         "I'm glad to hear it!"])
+        self.check_wrap(text, 80, [text])
+
+
+    def test_whitespace(self) -> None:
+        # Whitespace munging and end-of-sentence detection
+        # (newlines and tabs collapse to spaces; fix_sentence_endings=True
+        # restores two spaces after sentence-ending punctuation)
+
+        text = """\
+This is a paragraph that already has
+line breaks.  But some of its lines are much longer than the others,
+so it needs to be wrapped.
+Some lines are \ttabbed too.
+What a mess!
+"""
+
+        expect = ["This is a paragraph that already has line",
+                  "breaks.  But some of its lines are much",
+                  "longer than the others, so it needs to be",
+                  "wrapped.  Some lines are  tabbed too.  What a",
+                  "mess!"]
+
+        wrapper = TextWrapper(45, fix_sentence_endings=True)
+        result = wrapper.wrap(text)
+        self.check(result, expect)
+
+        # fill() must agree with wrap() joined by newlines.
+        results = wrapper.fill(text)
+        self.check(results, '\n'.join(expect))
+
+    def test_fix_sentence_endings(self) -> None:
+        # Exercise _fix_sentence_endings(): a single space after
+        # sentence-ending punctuation is widened to two spaces.
+        wrapper = TextWrapper(60, fix_sentence_endings=True)
+
+        # SF #847346: ensure that fix_sentence_endings=True does the
+        # right thing even on input short enough that it doesn't need to
+        # be wrapped.
+        text = "A short line. Note the single space."
+        expect = ["A short line.  Note the single space."]
+        self.check(wrapper.wrap(text), expect)
+
+        # Test some of the hairy end cases that _fix_sentence_endings()
+        # is supposed to handle (the easy stuff is tested in
+        # test_whitespace() above).
+        text = "Well, Doctor? What do you think?"
+        expect = ["Well, Doctor?  What do you think?"]
+        self.check(wrapper.wrap(text), expect)
+
+        text = "Well, Doctor?\nWhat do you think?"
+        self.check(wrapper.wrap(text), expect)
+
+        text = 'I say, chaps! Anyone for "tennis?"\nHmmph!'
+        expect = ['I say, chaps!  Anyone for "tennis?"  Hmmph!']
+        self.check(wrapper.wrap(text), expect)
+
+        # Narrowing the width puts each sentence on its own line.
+        wrapper.width = 20
+        expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!']
+        self.check(wrapper.wrap(text), expect)
+
+        text = 'And she said, "Go to hell!"\nCan you believe that?'
+        expect = ['And she said, "Go to',
+                  'hell!"  Can you',
+                  'believe that?']
+        self.check(wrapper.wrap(text), expect)
+
+        wrapper.width = 60
+        expect = ['And she said, "Go to hell!"  Can you believe that?']
+        self.check(wrapper.wrap(text), expect)
+
+        # A mid-sentence dot ("stdio.h") must not be treated as a
+        # sentence ending.
+        text = 'File stdio.h is nice.'
+        expect = ['File stdio.h is nice.']
+        self.check(wrapper.wrap(text), expect)
+
+    def test_wrap_short(self) -> None:
+        # Wrapping to make short lines longer
+        # (an embedded newline is just whitespace: lines merge when the
+        # width allows)
+
+        text = "This is a\nshort paragraph."
+
+        self.check_wrap(text, 20, ["This is a short",
+                                   "paragraph."])
+        self.check_wrap(text, 40, ["This is a short paragraph."])
+
+
+    def test_wrap_short_1line(self) -> None:
+        # Test endcases
+        # (single line that already fits, with and without initial_indent)
+
+        text = "This is a short line."
+
+        self.check_wrap(text, 30, ["This is a short line."])
+        self.check_wrap(text, 30, ["(1) This is a short line."],
+                        initial_indent="(1) ")
+
+
+    def test_hyphenated(self) -> None:
+        # Test breaking hyphenated words
+        # (break points fall only after hyphens; widths 40 and 41 give the
+        # same split, width 42 lets one more chunk onto the first line)
+
+        text = ("this-is-a-useful-feature-for-"
+                "reformatting-posts-from-tim-peters'ly")
+
+        self.check_wrap(text, 40,
+                        ["this-is-a-useful-feature-for-",
+                         "reformatting-posts-from-tim-peters'ly"])
+        self.check_wrap(text, 41,
+                        ["this-is-a-useful-feature-for-",
+                         "reformatting-posts-from-tim-peters'ly"])
+        self.check_wrap(text, 42,
+                        ["this-is-a-useful-feature-for-reformatting-",
+                         "posts-from-tim-peters'ly"])
+
+    def test_hyphenated_numbers(self) -> None:
+        # Test that hyphenated numbers (eg. dates) are not broken like words.
+        text = ("Python 1.0.0 was released on 1994-01-26.  Python 1.0.1 was\n"
+                "released on 1994-02-15.")
+
+        self.check_wrap(text, 30, ['Python 1.0.0 was released on',
+                                   '1994-01-26.  Python 1.0.1 was',
+                                   'released on 1994-02-15.'])
+        self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.',
+                                   'Python 1.0.1 was released on 1994-02-15.'])
+
+        # "7-11" must stay whole even when splitting it would let the line
+        # fit at width 25 or 27.
+        text = "I do all my shopping at 7-11."
+        self.check_wrap(text, 25, ["I do all my shopping at",
+                                   "7-11."])
+        self.check_wrap(text, 27, ["I do all my shopping at",
+                                   "7-11."])
+        self.check_wrap(text, 29, ["I do all my shopping at 7-11."])
+
+    def test_em_dash(self) -> None:
+        # Test text with em-dashes
+        # (both the canonical " -- " form and dashes jammed against words)
+        text = "Em-dashes should be written -- thus."
+        self.check_wrap(text, 25,
+                        ["Em-dashes should be",
+                         "written -- thus."])
+
+        # Probe the boundaries of the properly written em-dash,
+        # ie. " -- ".
+        self.check_wrap(text, 29,
+                        ["Em-dashes should be written",
+                         "-- thus."])
+        expect = ["Em-dashes should be written --",
+                  "thus."]
+        self.check_wrap(text, 30, expect)
+        self.check_wrap(text, 35, expect)
+        self.check_wrap(text, 36,
+                        ["Em-dashes should be written -- thus."])
+
+        # The improperly written em-dash is handled too, because
+        # it's adjacent to non-whitespace on both sides.
+        text = "You can also do--this or even---this."
+        expect = ["You can also do",
+                  "--this or even",
+                  "---this."]
+        self.check_wrap(text, 15, expect)
+        self.check_wrap(text, 16, expect)
+        expect = ["You can also do--",
+                  "this or even---",
+                  "this."]
+        self.check_wrap(text, 17, expect)
+        self.check_wrap(text, 19, expect)
+        expect = ["You can also do--this or even",
+                  "---this."]
+        self.check_wrap(text, 29, expect)
+        self.check_wrap(text, 31, expect)
+        expect = ["You can also do--this or even---",
+                  "this."]
+        self.check_wrap(text, 32, expect)
+        self.check_wrap(text, 35, expect)
+
+        # All of the above behaviour could be deduced by probing the
+        # _split() method.
+        text = "Here's an -- em-dash and--here's another---and another!"
+        expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ",
+                  "and", "--", "here's", " ", "another", "---",
+                  "and", " ", "another!"]
+        self.check_split(text, expect)
+
+        text = "and then--bam!--he was gone"
+        expect = ["and", " ", "then", "--", "bam!", "--",
+                  "he", " ", "was", " ", "gone"]
+        self.check_split(text, expect)
+
+
+    def test_unix_options (self) -> None:
+        # Test that Unix-style command-line options are wrapped correctly.
+        # Both Optik (OptionParser) and Docutils rely on this behaviour!
+
+        text = "You should use the -n option, or --dry-run in its long form."
+        self.check_wrap(text, 20,
+                        ["You should use the",
+                         "-n option, or --dry-",
+                         "run in its long",
+                         "form."])
+        self.check_wrap(text, 21,
+                        ["You should use the -n",
+                         "option, or --dry-run",
+                         "in its long form."])
+        expect = ["You should use the -n option, or",
+                  "--dry-run in its long form."]
+        self.check_wrap(text, 32, expect)
+        self.check_wrap(text, 34, expect)
+        self.check_wrap(text, 35, expect)
+        self.check_wrap(text, 38, expect)
+        expect = ["You should use the -n option, or --dry-",
+                  "run in its long form."]
+        self.check_wrap(text, 39, expect)
+        self.check_wrap(text, 41, expect)
+        expect = ["You should use the -n option, or --dry-run",
+                  "in its long form."]
+        self.check_wrap(text, 42, expect)
+
+        # Again, all of the above can be deduced from _split().
+        text = "the -n option, or --dry-run or --dryrun"
+        expect = ["the", " ", "-n", " ", "option,", " ", "or", " ",
+                  "--dry-", "run", " ", "or", " ", "--dryrun"]
+        self.check_split(text, expect)
+
+    def test_funky_hyphens (self) -> None:
+        # Screwy edge cases cooked up by David Goodger.  All reported
+        # in SF bug #596434.
+        self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"])
+        self.check_split("what the--", ["what", " ", "the--"])
+        self.check_split("what the--.", ["what", " ", "the--."])
+        self.check_split("--text--.", ["--text--."])
+
+        # When I first read bug #596434, this is what I thought David
+        # was talking about.  I was wrong; these have always worked
+        # fine.  The real problem is tested in test_funky_parens()
+        # below...
+        self.check_split("--option", ["--option"])
+        self.check_split("--option-opt", ["--option-", "opt"])
+        self.check_split("foo --option-opt bar",
+                         ["foo", " ", "--option-", "opt", " ", "bar"])
+
+    def test_punct_hyphens(self) -> None:
+        # Oh bother, SF #965425 found another problem with hyphens --
+        # hyphenated words in single quotes weren't handled correctly.
+        # In fact, the bug is that *any* punctuation around a hyphenated
+        # word was handled incorrectly, except for a leading "--", which
+        # was special-cased for Optik and Docutils.  So test a variety
+        # of styles of punctuation around a hyphenated word.
+        # (Actually this is based on an Optik bug report, #813077).
+        self.check_split("the 'wibble-wobble' widget",
+                         ['the', ' ', "'wibble-", "wobble'", ' ', 'widget'])
+        self.check_split('the "wibble-wobble" widget',
+                         ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget'])
+        self.check_split("the (wibble-wobble) widget",
+                         ['the', ' ', "(wibble-", "wobble)", ' ', 'widget'])
+        self.check_split("the ['wibble-wobble'] widget",
+                         ['the', ' ', "['wibble-", "wobble']", ' ', 'widget'])
+
+    def test_funky_parens (self) -> None:
+        # Second part of SF bug #596434: long option strings inside
+        # parentheses.
+        self.check_split("foo (--option) bar",
+                         ["foo", " ", "(--option)", " ", "bar"])
+
+        # Related stuff -- make sure parens work in simpler contexts.
+        self.check_split("foo (bar) baz",
+                         ["foo", " ", "(bar)", " ", "baz"])
+        self.check_split("blah (ding dong), wubba",
+                         ["blah", " ", "(ding", " ", "dong),",
+                          " ", "wubba"])
+
+    def test_initial_whitespace(self) -> None:
+        # SF bug #622849 reported inconsistent handling of leading
+        # whitespace; let's test that a bit, shall we?
+        text = " This is a sentence with leading whitespace."
+        self.check_wrap(text, 50,
+                        [" This is a sentence with leading whitespace."])
+        self.check_wrap(text, 30,
+                        [" This is a sentence with", "leading whitespace."])
+
+    def test_no_drop_whitespace(self) -> None:
+        # SF patch #1581073
+        text = " This is a    sentence with     much whitespace."
+        self.check_wrap(text, 10,
+                        [" This is a", "    ", "sentence ",
+                         "with     ", "much white", "space."],
+                        drop_whitespace=False)
+
+    def test_split(self) -> None:
+        # Ensure that the standard _split() method works as advertised
+        # in the comments
+
+        text = "Hello there -- you goof-ball, use the -b option!"
+
+        result = self.wrapper._split(text)
+        self.check(result,
+             ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-",
+              "ball,", " ", "use", " ", "the", " ", "-b", " ",  "option!"])
+
+    def test_break_on_hyphens(self) -> None:
+        # Ensure that the break_on_hyphens attributes work
+        text = "yaba daba-doo"
+        self.check_wrap(text, 10, ["yaba daba-", "doo"],
+                        break_on_hyphens=True)
+        self.check_wrap(text, 10, ["yaba", "daba-doo"],
+                        break_on_hyphens=False)
+
+    def test_bad_width(self) -> None:
+        # Ensure that width <= 0 is caught.
+        text = "Whatever, it doesn't matter."
+        self.assertRaises(ValueError, wrap, text, 0)
+        self.assertRaises(ValueError, wrap, text, -1)
+
+    def test_no_split_at_umlaut(self) -> None:
+        text = "Die Empf\xe4nger-Auswahl"
+        self.check_wrap(text, 13, ["Die", "Empf\xe4nger-", "Auswahl"])
+
+    def test_umlaut_followed_by_dash(self) -> None:
+        text = "aa \xe4\xe4-\xe4\xe4"
+        self.check_wrap(text, 7, ["aa \xe4\xe4-", "\xe4\xe4"])
+
+
class LongWordTestCase(BaseTestCase):
    """Wrapping behaviour for words longer than the requested width."""

    def setUp(self) -> None:
        self.wrapper = TextWrapper()
        self.text = '''\
Did you say "supercalifragilisticexpialidocious?"
How *do* you spell that odd word, anyways?
'''

    def test_break_long(self) -> None:
        # Wrap text with long words and lots of punctuation.
        self.check_wrap(self.text, 30,
                        ['Did you say "supercalifragilis',
                         'ticexpialidocious?" How *do*',
                         'you spell that odd word,',
                         'anyways?'])
        self.check_wrap(self.text, 50,
                        ['Did you say "supercalifragilisticexpialidocious?"',
                         'How *do* you spell that odd word, anyways?'])

        # SF bug 797650.  Prevent an infinite loop by making sure that at
        # least one character gets split off on every pass.
        expected = ['----------'] + [' ' * 15 + ch for ch in "hello"]
        self.check_wrap('-' * 10 + 'hello', 10, expected,
                        subsequent_indent=' ' * 15)

        # bug 1146.  Prevent a long word to be wrongly wrapped when the
        # preceding word is exactly one character shorter than the width.
        self.check_wrap(self.text, 12,
                        ['Did you say ',
                         '"supercalifr',
                         'agilisticexp',
                         'ialidocious?',
                         '" How *do*',
                         'you spell',
                         'that odd',
                         'word,',
                         'anyways?'])

    def test_nobreak_long(self) -> None:
        # Test with break_long_words disabled.
        self.wrapper.break_long_words = False
        self.wrapper.width = 30
        expected = ['Did you say',
                    '"supercalifragilisticexpialidocious?"',
                    'How *do* you spell that odd',
                    'word, anyways?']
        self.check(self.wrapper.wrap(self.text), expected)

        # Same thing with kwargs passed to standalone wrap() function.
        self.check(wrap(self.text, width=30, break_long_words=0), expected)
+
+
class IndentTestCases(BaseTestCase):
    """Tests for the initial_indent and subsequent_indent parameters."""

    # Called before each test method.
    def setUp(self) -> None:
        self.text = '''\
This paragraph will be filled, first without any indentation,
and then with some (including a hanging indent).'''

    def test_fill(self) -> None:
        # Test the fill() method.
        expected = '''\
This paragraph will be filled, first
without any indentation, and then with
some (including a hanging indent).'''
        self.check(fill(self.text, 40), expected)

    def test_initial_indent(self) -> None:
        # Test the initial_indent parameter, through both wrap() and
        # fill().
        expected_lines = ["     This paragraph will be filled,",
                          "first without any indentation, and then",
                          "with some (including a hanging indent)."]
        self.check(wrap(self.text, 40, initial_indent="     "),
                   expected_lines)
        self.check(fill(self.text, 40, initial_indent="     "),
                   "\n".join(expected_lines))

    def test_subsequent_indent(self) -> None:
        # Test the subsequent_indent parameter.
        expected = '''\
  * This paragraph will be filled, first
    without any indentation, and then
    with some (including a hanging
    indent).'''
        self.check(fill(self.text, 40,
                        initial_indent="  * ", subsequent_indent="    "),
                   expected)
+
+
# Despite the similar names, DedentTestCase is *not* the inverse
# of IndentTestCase!
class DedentTestCase(unittest.TestCase):
    """Tests for the module-level dedent() helper."""

    def assertUnchanged(self, text: str) -> None:
        """assert that dedent() has no effect on 'text'"""
        self.assertEqual(text, dedent(text))

    def test_dedent_nomargin(self) -> None:
        # Inputs whose common leading margin is already zero must pass
        # through dedent() untouched.
        for text in [
            # No lines indented.
            "Hello there.\nHow are you?\nOh good, I'm glad.",
            # Similar, with a blank line.
            "Hello there.\n\nBoo!",
            # Some lines indented, but overall margin is still zero.
            "Hello there.\n  This is indented.",
            # Again, add a blank line.
            "Hello there.\n\n  Boo!\n",
        ]:
            self.assertUnchanged(text)

    def test_dedent_even(self) -> None:
        # All lines indented by two spaces.
        self.assertEqual(
            "Hello there.\nHow are ya?\nOh good.",
            dedent("  Hello there.\n  How are ya?\n  Oh good."))

        # Same, with blank lines.
        self.assertEqual(
            "Hello there.\n\nHow are ya?\nOh good.\n",
            dedent("  Hello there.\n\n  How are ya?\n  Oh good.\n"))

        # Now indent one of the blank lines.
        self.assertEqual(
            "Hello there.\n\nHow are ya?\nOh good.\n",
            dedent("  Hello there.\n  \n  How are ya?\n  Oh good.\n"))

    def test_dedent_uneven(self) -> None:
        # Lines indented unevenly.
        text = '''\
        def foo():
            while 1:
                return foo
        '''
        expect = '''\
def foo():
    while 1:
        return foo
'''
        self.assertEqual(expect, dedent(text))

        # Uneven indentation with a blank line.
        self.assertEqual("Foo\n  Bar\n\n Baz\n",
                         dedent("  Foo\n    Bar\n\n   Baz\n"))

        # Uneven indentation with a whitespace-only line.
        self.assertEqual("Foo\n  Bar\n\n Baz\n",
                         dedent("  Foo\n    Bar\n \n   Baz\n"))

    # dedent() should not mangle internal tabs.
    def test_dedent_preserve_internal_tabs(self) -> None:
        expect = "hello\tthere\nhow are\tyou?"
        self.assertEqual(expect,
                         dedent("  hello\tthere\n  how are\tyou?"))

        # make sure that it preserves tabs when it's not making any
        # changes at all
        self.assertEqual(expect, dedent(expect))

    # dedent() should not mangle tabs in the margin (i.e.
    # tabs and spaces both count as margin, but are *not*
    # considered equivalent)
    def test_dedent_preserve_margin_tabs(self) -> None:
        self.assertUnchanged("  hello there\n\thow are you?")

        # same effect even if we have 8 spaces
        self.assertUnchanged("        hello there\n\thow are you?")

        # dedent() only removes whitespace that can be uniformly removed!
        expect = "hello there\nhow are you?"
        for text in ["\thello there\n\thow are you?",
                     "  \thello there\n  \thow are you?",
                     "  \t  hello there\n  \t  how are you?"]:
            self.assertEqual(expect, dedent(text))

        self.assertEqual("hello there\n  how are you?",
                         dedent("  \thello there\n  \t  how are you?"))
+
+
def test_main() -> None:
    """Run every test case in this module via test.support."""
    support.run_unittest(WrapTestCase,
                         LongWordTestCase,
                         IndentTestCases,
                         DedentTestCase)


if __name__ == '__main__':
    test_main()
diff --git a/test-data/stdlib-samples/3.2/test/tf_inherit_check.py b/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
new file mode 100644
index 0000000..92ebd95
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/test/tf_inherit_check.py
@@ -0,0 +1,25 @@
# Helper script for test_tempfile.py.  argv[2] is the number of a file
# descriptor which should _not_ be open.  Check this by attempting to
# write to it -- if we succeed, something is wrong.
#
# Exit status: 0 = descriptor was correctly closed, 1 = anything else.

import sys
import os

# argv[1] selects verbose mode ('v'); any other value means quiet.
verbose = (sys.argv[1] == 'v')
try:
    # The descriptor under test arrives as a decimal string.
    fd = int(sys.argv[2])

    try:
        os.write(fd, b"blat")
    except os.error:
        # Success -- could not write to fd.
        sys.exit(0)
    else:
        # The write succeeded, so the child (wrongly) inherited the fd.
        if verbose:
            sys.stderr.write("fd %d is open in child" % fd)
        sys.exit(1)

except Exception:
    # Any unexpected error (missing/non-integer argv, ...) also counts
    # as failure; re-raise in verbose mode so the traceback is visible.
    if verbose:
        raise
    sys.exit(1)
diff --git a/test-data/stdlib-samples/3.2/textwrap.py b/test-data/stdlib-samples/3.2/textwrap.py
new file mode 100644
index 0000000..a6d0266
--- /dev/null
+++ b/test-data/stdlib-samples/3.2/textwrap.py
@@ -0,0 +1,391 @@
+"""Text wrapping and filling.
+"""
+
+# Copyright (C) 1999-2001 Gregory P. Ward.
+# Copyright (C) 2002, 2003 Python Software Foundation.
+# Written by Greg Ward <gward at python.net>
+
import re
import string

from typing import Any, Dict, List, Optional
+
+__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent']
+
+# Hardcode the recognized whitespace characters to the US-ASCII
+# whitespace characters.  The main reason for doing this is that in
+# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
+# that character winds up in string.whitespace.  Respecting
+# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
+# same as any other whitespace char, which is clearly wrong (it's a
+# *non-breaking* space), 2) possibly cause problems with Unicode,
+# since 0xa0 is not in range(128).
+_whitespace = '\t\n\x0b\x0c\r '
+
class TextWrapper:
    """
    Object for wrapping/filling text.  The public interface consists of
    the wrap() and fill() methods; the other methods are just there for
    subclasses to override in order to tweak the default behaviour.
    If you want to completely replace the main wrapping algorithm,
    you'll probably have to override _wrap_chunks().

    Several instance attributes control various aspects of wrapping:
      width (default: 70)
        the maximum width of wrapped lines (unless break_long_words
        is false)
      initial_indent (default: "")
        string that will be prepended to the first line of wrapped
        output.  Counts towards the line's width.
      subsequent_indent (default: "")
        string that will be prepended to all lines save the first
        of wrapped output; also counts towards each line's width.
      expand_tabs (default: true)
        Expand tabs in input text to spaces before further processing.
        Each tab will become 1 .. 8 spaces, depending on its position in
        its line.  If false, each tab is treated as a single character.
      replace_whitespace (default: true)
        Replace all whitespace characters in the input text by spaces
        after tab expansion.  Note that if expand_tabs is false and
        replace_whitespace is true, every tab will be converted to a
        single space!
      fix_sentence_endings (default: false)
        Ensure that sentence-ending punctuation is always followed
        by two spaces.  Off by default because the algorithm is
        (unavoidably) imperfect.
      break_long_words (default: true)
        Break words longer than 'width'.  If false, those words will not
        be broken, and some lines might be longer than 'width'.
      break_on_hyphens (default: true)
        Allow breaking hyphenated words. If true, wrapping will occur
        preferably on whitespaces and right after hyphens part of
        compound words.
      drop_whitespace (default: true)
        Drop leading and trailing whitespace from lines.
    """

    # Translation table built once at class-definition time: maps every
    # recognized whitespace character (see _whitespace) to an ordinary
    # ASCII space, for use with str.translate().
    unicode_whitespace_trans = {}  # type: Dict[int, int]
    uspace = ord(' ')
    for x in _whitespace:
        unicode_whitespace_trans[ord(x)] = uspace
    # NOTE(review): 'uspace' and loop variable 'x' linger as class
    # attributes -- a harmless artifact of building the table in the
    # class body.

    # This funky little regex is just the trick for splitting
    # text up into word-wrappable chunks.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
    # (after stripping out empty strings).
    wordsep_re = re.compile(
        r'(\s+|'                                  # any whitespace
        r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|'   # hyphenated words
        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash

    # This less funky little regex just split on recognized spaces. E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
    wordsep_simple_re = re.compile(r'(\s+)')

    # XXX this is not locale- or charset-aware -- string.lowercase
    # is US-ASCII only (and therefore English-only)
    sentence_end_re = re.compile(r'[a-z]'             # lowercase letter
                                 r'[\.\!\?]'          # sentence-ending punct.
                                 r'[\"\']?'           # optional end-of-quote
                                 r'\Z')               # end of chunk


    def __init__(self,
                 width: int = 70,
                 initial_indent: str = "",
                 subsequent_indent: str = "",
                 expand_tabs: bool = True,
                 replace_whitespace: bool = True,
                 fix_sentence_endings: bool = False,
                 break_long_words: bool = True,
                 drop_whitespace: bool = True,
                 break_on_hyphens: bool = True) -> None:
        self.width = width
        self.initial_indent = initial_indent
        self.subsequent_indent = subsequent_indent
        self.expand_tabs = expand_tabs
        self.replace_whitespace = replace_whitespace
        self.fix_sentence_endings = fix_sentence_endings
        self.break_long_words = break_long_words
        self.drop_whitespace = drop_whitespace
        self.break_on_hyphens = break_on_hyphens


    # -- Private methods -----------------------------------------------
    # (possibly useful for subclasses to override)

    def _munge_whitespace(self, text: str) -> str:
        """_munge_whitespace(text : string) -> string

        Munge whitespace in text: expand tabs and convert all other
        whitespace characters to spaces.  Eg. " foo\tbar\n\nbaz"
        becomes " foo    bar  baz".
        """
        if self.expand_tabs:
            text = text.expandtabs()
        if self.replace_whitespace:
            text = text.translate(self.unicode_whitespace_trans)
        return text


    def _split(self, text: str) -> List[str]:
        """_split(text : string) -> [string]

        Split the text to wrap into indivisible chunks.  Chunks are
        not quite the same as words; see _wrap_chunks() for full
        details.  As an example, the text
          Look, goof-ball -- use the -b option!
        breaks into the following chunks:
          'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
        if break_on_hyphens is True, or in:
          'Look,', ' ', 'goof-ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', option!'
        otherwise.
        """
        if self.break_on_hyphens is True:
            chunks = self.wordsep_re.split(text)
        else:
            chunks = self.wordsep_simple_re.split(text)
        # The capturing split leaves empty strings between adjacent
        # delimiters; filter them out.
        chunks = [c for c in chunks if c]
        return chunks

    def _fix_sentence_endings(self, chunks: List[str]) -> None:
        """_fix_sentence_endings(chunks : [string])

        Correct for sentence endings buried in 'chunks'.  Eg. when the
        original text contains "... foo.\nBar ...", munge_whitespace()
        and split() will convert that to [..., "foo.", " ", "Bar", ...]
        which has one too few spaces; this method simply changes the one
        space to two.
        """
        i = 0
        patsearch = self.sentence_end_re.search
        while i < len(chunks)-1:
            if chunks[i+1] == " " and patsearch(chunks[i]):
                chunks[i+1] = "  "
                i += 2
            else:
                i += 1

    def _handle_long_word(self, reversed_chunks: List[str],
                          cur_line: List[str], cur_len: int,
                          width: int) -> None:
        """_handle_long_word(chunks : [string],
                             cur_line : [string],
                             cur_len : int, width : int)

        Handle a chunk of text (most likely a word, not whitespace) that
        is too long to fit in any line.
        """
        # Figure out when indent is larger than the specified width, and make
        # sure at least one character is stripped off on every pass
        if width < 1:
            space_left = 1
        else:
            space_left = width - cur_len

        # If we're allowed to break long words, then do so: put as much
        # of the next chunk onto the current line as will fit.
        if self.break_long_words:
            cur_line.append(reversed_chunks[-1][:space_left])
            reversed_chunks[-1] = reversed_chunks[-1][space_left:]

        # Otherwise, we have to preserve the long word intact.  Only add
        # it to the current line if there's nothing already there --
        # that minimizes how much we violate the width constraint.
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

        # If we're not allowed to break long words, and there's already
        # text on the current line, do nothing.  Next time through the
        # main loop of _wrap_chunks(), we'll wind up here again, but
        # cur_len will be zero, so the next line will be entirely
        # devoted to the long word that we can't handle right now.

    def _wrap_chunks(self, chunks: List[str]) -> List[str]:
        """_wrap_chunks(chunks : [string]) -> [string]

        Wrap a sequence of text chunks and return a list of lines of
        length 'self.width' or less.  (If 'break_long_words' is false,
        some lines may be longer than this.)  Chunks correspond roughly
        to words and the whitespace between them: each chunk is
        indivisible (modulo 'break_long_words'), but a line break can
        come between any two chunks.  Chunks should not have internal
        whitespace; ie. a chunk is either all whitespace or a "word".
        Whitespace chunks will be removed from the beginning and end of
        lines, but apart from that whitespace is preserved.
        """
        lines = []  # type: List[str]
        if self.width <= 0:
            raise ValueError("invalid width %r (must be > 0)" % self.width)

        # Arrange in reverse order so items can be efficiently popped
        # from a stack of chunks.
        chunks.reverse()

        while chunks:

            # Start the list of chunks that will make up the current line.
            # cur_len is just the length of all the chunks in cur_line.
            cur_line = []  # type: List[str]
            cur_len = 0

            # Figure out which static string will prefix this line.
            if lines:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent

            # Maximum width for this line.
            width = self.width - len(indent)

            # First chunk on line is whitespace -- drop it, unless this
            # is the very beginning of the text (ie. no lines started yet).
            if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                del chunks[-1]

            while chunks:
                l = len(chunks[-1])

                # Can at least squeeze this chunk onto the current line.
                if cur_len + l <= width:
                    cur_line.append(chunks.pop())
                    cur_len += l

                # Nope, this line is full.
                else:
                    break

            # The current line is full, and the next chunk is too big to
            # fit on *any* line (not just this one).
            if chunks and len(chunks[-1]) > width:
                self._handle_long_word(chunks, cur_line, cur_len, width)

            # If the last chunk on this line is all whitespace, drop it.
            if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
                del cur_line[-1]

            # Convert current line back to a string and store it in list
            # of all lines (return value).
            if cur_line:
                lines.append(indent + ''.join(cur_line))

        return lines


    # -- Public interface ----------------------------------------------

    def wrap(self, text: str) -> List[str]:
        """wrap(text : string) -> [string]

        Reformat the single paragraph in 'text' so it fits in lines of
        no more than 'self.width' columns, and return a list of wrapped
        lines.  Tabs in 'text' are expanded with string.expandtabs(),
        and all other whitespace characters (including newline) are
        converted to space.
        """
        text = self._munge_whitespace(text)
        chunks = self._split(text)
        if self.fix_sentence_endings:
            self._fix_sentence_endings(chunks)
        return self._wrap_chunks(chunks)

    def fill(self, text: str) -> str:
        """fill(text : string) -> string

        Reformat the single paragraph in 'text' to fit in lines of no
        more than 'self.width' columns, and return a new string
        containing the entire wrapped paragraph.
        """
        return "\n".join(self.wrap(text))
+
+
+# -- Convenience interface ---------------------------------------------
+
def wrap(text: str, width: int = 70, **kwargs: Any) -> List[str]:
    """Wrap a single paragraph of text, returning a list of wrapped lines.

    Reformat the single paragraph in 'text' so it fits in lines of no
    more than 'width' columns, and return a list of wrapped lines.  By
    default, tabs in 'text' are expanded with string.expandtabs(), and
    all other whitespace characters (including newline) are converted to
    space.  See TextWrapper class for available keyword args to customize
    wrapping behaviour.
    """
    # Thin convenience front-end: build a throwaway wrapper and delegate.
    return TextWrapper(width=width, **kwargs).wrap(text)
+
def fill(text: str, width: int = 70, **kwargs: Any) -> str:
    """Fill a single paragraph of text, returning a new string.

    Reformat the single paragraph in 'text' to fit in lines of no more
    than 'width' columns, and return a new string containing the entire
    wrapped paragraph.  As with wrap(), tabs are expanded and other
    whitespace characters converted to space.  See TextWrapper class for
    available keyword args to customize wrapping behaviour.
    """
    # Thin convenience front-end: build a throwaway wrapper and delegate.
    return TextWrapper(width=width, **kwargs).fill(text)
+
+
# -- Loosely related functionality -------------------------------------

# A line consisting entirely of spaces/tabs (^/$ are per-line anchors
# under re.MULTILINE).
_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
# The leading space/tab run of every line that has non-whitespace
# content (the non-capturing group excludes blank lines from findall).
_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)

def dedent(text: str) -> str:
    """Remove any common leading whitespace from every line in `text`.

    This can be used to make triple-quoted strings line up with the left
    edge of the display, while still presenting them in the source code
    in indented form.

    Note that tabs and spaces are both treated as whitespace, but they
    are not equal: the lines "  hello" and "\thello" are
    considered to have no common leading whitespace.  (This behaviour is
    new in Python 2.5; older versions of this module incorrectly
    expanded tabs before searching for common leading whitespace.)
    """
    # Look for the longest leading string of spaces and tabs common to
    # all lines.  'margin' starts out unset, so its type must admit
    # None (the previous "# type: str" comment was incorrect).
    margin = None  # type: Optional[str]
    # Normalize whitespace-only lines to empty so they cannot shrink
    # the computed margin.
    text = _whitespace_only_re.sub('', text)
    indents = _leading_whitespace_re.findall(text)
    for indent in indents:
        if margin is None:
            margin = indent

        # Current line more deeply indented than previous winner:
        # no change (previous winner is still on top).
        elif indent.startswith(margin):
            pass

        # Current line consistent with and no deeper than previous winner:
        # it's the new winner.
        elif margin.startswith(indent):
            margin = indent

        # Current line and previous winner have no common whitespace:
        # there is no margin.
        else:
            margin = ""
            break

    # sanity check (testing/debugging only)
    if 0 and margin:
        for line in text.split("\n"):
            assert not line or line.startswith(margin), \
                   "line = %r, margin = %r" % (line, margin)

    if margin:
        text = re.sub(r'(?m)^' + margin, '', text)
    return text
+
if __name__ == "__main__":
    # Tiny smoke test when run as a script.
    print(dedent("Hello there.\n  This is indented."))
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
new file mode 100644
index 0000000..1caeabf
--- /dev/null
+++ b/test-data/unit/check-abstract.test
@@ -0,0 +1,734 @@
+-- Type checker test cases for abstract classes.
+
+
+-- Subtyping with abstract classes
+-- -------------------------------
+
+
+[case testAbstractClassSubclasses]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+j = None # type: J
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+j = c  # E: Incompatible types in assignment (expression has type "C", variable has type "J")
+a = i  # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+a = j  # E: Incompatible types in assignment (expression has type "J", variable has type "A")
+b = i  # E: Incompatible types in assignment (expression has type "I", variable has type "B")
+
+i = a
+i = b
+i = c
+j = a
+j = b
+a = b
+
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self): pass
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self): pass
+class A(I, J): pass
+class B(A): pass
+class C(I): pass
+
+[case testAbstractClassSubtypingViaExtension]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+j = None # type: J
+a = None # type: A
+o = None # type: object
+
+j = i # E: Incompatible types in assignment (expression has type "I", variable has type "J")
+a = i # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+a = j # E: Incompatible types in assignment (expression has type "J", variable has type "A")
+i = o # E: Incompatible types in assignment (expression has type "object", variable has type "I")
+j = o # E: Incompatible types in assignment (expression has type "object", variable has type "J")
+
+i = a
+j = a
+i = j
+o = i
+o = j
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class J(I): pass
+class A(J): pass
+
+[case testInheritingAbstractClassInSubclass]
+
+from abc import abstractmethod, ABCMeta
+
+i = None # type: I
+a = None # type: A
+b = None # type: B
+
+i = a # E: Incompatible types in assignment (expression has type "A", variable has type "I")
+b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = b
+i = b
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class A: pass
+class B(A, I): pass
+
+
+-- Abstract class objects
+-- ----------------------
+
+
+[case testAbstractClassAsTypeObject]
+
+from abc import abstractmethod, ABCMeta
+
+o = None # type: object
+t = None # type: type
+
+o = I
+t = I
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+
+[case testAbstractClassInCasts]
+from typing import cast
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class A(I): pass
+class B: pass
+
+i, a, b = None, None, None # type: (I, A, B)
+o = None # type: object
+
+a = cast(I, o) # E: Incompatible types in assignment (expression has type "I", variable has type "A")
+b = cast(B, i) # Ok; a subclass of B might inherit I
+i = cast(I, b) # Ok; a subclass of B might inherit I
+
+i = cast(I, o)
+i = cast(I, a)
+
+[case testInstantiatingClassThatImplementsAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+class B(A):
+  def f(self): pass
+B()
+[out]
+
+[case testInstantiatingAbstractClass]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta): pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+A() # OK
+B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'f'
+[out]
+
+[case testInstantiatingClassWithInheritedAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self): pass
+  @abstractmethod
+  def g(self): pass
+class B(A): pass
+B()# E: Cannot instantiate abstract class 'B' with abstract attributes 'f' and 'g'
+[out]
+
+[case testInstantiatingClassWithInheritedAbstractMethodAndSuppression]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def a(self): pass
+    @abstractmethod
+    def b(self): pass
+    @abstractmethod
+    def c(self): pass
+    @abstractmethod
+    def d(self): pass
+    @abstractmethod
+    def e(self): pass
+    @abstractmethod
+    def f(self): pass
+    @abstractmethod
+    def g(self): pass
+    @abstractmethod
+    def h(self): pass
+    @abstractmethod
+    def i(self): pass
+    @abstractmethod
+    def j(self): pass
+a = A() # E: Cannot instantiate abstract class 'A' with abstract attributes 'a', 'b', ... and 'j' (7 methods suppressed)
+[out]
+
+
+-- Implementing abstract methods
+-- -----------------------------
+
+
+[case testImplementingAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+    @abstractmethod
+    def g(self, x: int) -> int: pass
+class B(A):
+    def f(self, x: str) -> int: \
+            # E: Argument 1 of "f" incompatible with supertype "A"
+        pass
+    def g(self, x: int) -> int: pass
+[out]
+
+[case testImplementingAbstractMethodWithMultipleBaseClasses]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self, x: str) -> str: pass
+class A(I, J):
+    def f(self, x: str) -> int: pass \
+        # E: Argument 1 of "f" incompatible with supertype "I"
+    def g(self, x: str) -> int: pass \
+        # E: Return type of "g" incompatible with supertype "J"
+    def h(self) -> int: pass # Not related to any base class
+[out]
+
+[case testImplementingAbstractMethodWithExtension]
+from abc import abstractmethod, ABCMeta
+import typing
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> int: pass
+class I(J): pass
+class A(I):
+    def f(self, x: str) -> int: pass \
+        # E: Argument 1 of "f" incompatible with supertype "J"
+[out]
+
+[case testInvalidOverridingAbstractMethod]
+from abc import abstractmethod, ABCMeta
+import typing
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: 'J') -> None: pass
+class I(J):
+    @abstractmethod
+    def f(self, x: 'I') -> None: pass # E: Argument 1 of "f" incompatible with supertype "J"
+[out]
+
+[case testAbstractClassCoAndContraVariance]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: A) -> 'I': pass
+    @abstractmethod
+    def g(self, a: A) -> 'I': pass
+    @abstractmethod
+    def h(self, a: 'I') -> A: pass
+class A(I):
+    def h(self, a: 'A') -> 'I': # Fail
+        pass
+    def f(self, a: 'I') -> 'I':
+        pass
+    def g(self, a: 'A') -> 'A':
+        pass
+[out]
+main:11: error: Argument 1 of "h" incompatible with supertype "I"
+main:11: error: Return type of "h" incompatible with supertype "I"
+
+
+-- Accessing abstract members
+-- --------------------------
+
+
+[case testAccessingAbstractMethod]
+
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: int) -> str: pass
+
+i, a, b = None, None, None # type: (I, int, str)
+
+a = i.f(a) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = i.f(b) # E: Argument 1 to "f" of "I" has incompatible type "str"; expected "int"
+i.g()      # E: "I" has no attribute "g"
+
+b = i.f(a)
+
+[case testAccessingInheritedAbstractMethod]
+
+from abc import abstractmethod, ABCMeta
+
+class J(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, a: int) -> str: pass
+class I(J): pass
+
+i, a, b = None, None, None # type: (I, int, str)
+
+a = i.f(1) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = i.f(1)
+
+
+-- Any (dynamic) types
+-- -------------------
+
+
+[case testAbstractClassWithAllDynamicTypes]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x): pass
+    @abstractmethod
+    def g(self, x): pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y) -> None: pass \
+        # E: Signature of "g" incompatible with supertype "I"
+[out]
+
+[case testAbstractClassWithAllDynamicTypes2]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x): pass
+    @abstractmethod
+    def g(self, x): pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y): pass
+[out]
+
+[case testAbstractClassWithImplementationUsingDynamicTypes]
+from abc import abstractmethod, ABCMeta
+import typing
+class I(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self, x: int) -> None: pass
+    @abstractmethod
+    def g(self, x: int) -> None: pass
+class A(I):
+    def f(self, x): pass
+    def g(self, x, y): pass
+[out]
+
+
+-- Special cases
+-- -------------
+
+
+[case testMultipleAbstractBases]
+from abc import abstractmethod, ABCMeta
+import typing
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> None: pass
+class C(A, B):
+  @abstractmethod
+  def h(self) -> None: pass
+
+[case testMemberAccessWithMultipleAbstractBaseClasses]
+
+from abc import abstractmethod, ABCMeta
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+    @abstractmethod
+    def g(self) -> None: pass
+class C(A, B): pass
+x = None # type: C
+x.f()
+x.g()
+x.f(x) # E: Too many arguments for "f" of "A"
+x.g(x) # E: Too many arguments for "g" of "B"
+
+[case testInstantiatingAbstractClassWithMultipleBaseClasses]
+
+from abc import abstractmethod, ABCMeta
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> None: pass
+class C(A, B):
+  def f(self) -> None: pass
+class D(A, B):
+  def g(self) -> None: pass
+class E(A, B):
+  def f(self) -> None: pass
+  def g(self) -> None: pass
+C() # E: Cannot instantiate abstract class 'C' with abstract attribute 'g'
+D() # E: Cannot instantiate abstract class 'D' with abstract attribute 'f'
+E()
+
+[case testInconsistentMro]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta): pass
+class B(object, A): pass \
+      # E: Cannot determine consistent method resolution order (MRO) for "B"
+
+[case testOverloadedAbstractMethod]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  @overload
+  def f(self, x: int) -> int: pass
+  @abstractmethod
+  @overload
+  def f(self, x: str) -> str: pass
+
+class B(A):
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
+B()
+B().f(1)
+a = B() # type: A
+a.f(1)
+a.f('')
+a.f(B()) # E: No overload variant of "f" of "A" matches argument types [__main__.B]
+
+[case testOverloadedAbstractMethodWithAlternativeDecoratorOrder]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @overload
+  @abstractmethod
+  def f(self, x: int) -> int: pass
+  @overload
+  @abstractmethod
+  def f(self, x: str) -> str: pass
+
+class B(A):
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+A() # E: Cannot instantiate abstract class 'A' with abstract attribute 'f'
+B()
+B().f(1)
+a = B() # type: A
+a.f(1)
+a.f('')
+a.f(B()) # E: No overload variant of "f" of "A" matches argument types [__main__.B]
+
+[case testOverloadedAbstractMethodVariantMissingDecorator1]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @abstractmethod \
+    # E: Overloaded method has both abstract and non-abstract variants
+  @overload
+  def f(self, x: int) -> int: pass
+  @overload
+  def f(self, x: str) -> str: pass
+[out]
+
+[case testOverloadedAbstractMethodVariantMissingDecorator1]
+from abc import abstractmethod, ABCMeta
+from typing import overload
+
+class A(metaclass=ABCMeta):
+  @overload \
+    # E: Overloaded method has both abstract and non-abstract variants
+  def f(self, x: int) -> int: pass
+  @abstractmethod
+  @overload
+  def f(self, x: str) -> str: pass
+[out]
+
+[case testMultipleInheritanceAndAbstractMethod]
+import typing
+from abc import abstractmethod, ABCMeta
+class A:
+  def f(self, x: str) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self, x: str) -> None: pass
+class C(A, B): pass
+
+[case testMultipleInheritanceAndAbstractMethod2]
+import typing
+from abc import abstractmethod, ABCMeta
+class A:
+  def f(self, x: str) -> None: pass
+class B(metaclass=ABCMeta):
+  @abstractmethod
+  def f(self, x: int) -> None: pass
+class C(A, B): pass
+[out]
+main:8: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testCallAbstractMethodBeforeDefinition]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    def f(self) -> None:
+        self.g(1) # E: Argument 1 to "g" of "A" has incompatible type "int"; expected "str"
+    @abstractmethod
+    def g(self, x: str) -> None: pass
+[out]
+
+[case testAbstractOperatorMethods1]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def __lt__(self, other: 'A') -> int: pass
+    @abstractmethod
+    def __gt__(self, other: 'A') -> int: pass
+
+[case testAbstractOperatorMethods2]
+import typing
+from abc import abstractmethod, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def __radd__(self, other: 'C') -> str: pass # Error
+class B:
+    @abstractmethod
+    def __add__(self, other: 'A') -> int: pass
+class C:
+    def __add__(self, other: int) -> B: pass
+[out]
+
+
+-- Abstract properties
+-- -------------------
+
+
+[case testReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+def f(a: A) -> None:
+    a.x() # E: "int" not callable
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+[out]
+
+[case testReadOnlyAbstractPropertyForwardRef]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x() # E: "int" not callable
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+[out]
+
+[case testReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y # E: "int" has no attribute "y"
+    a.x = 1
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, x: int) -> None: pass
+[out]
+
+[case testInstantiateClassWithReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A): pass
+b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+
+[case testInstantiateClassWithReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, x: int) -> None: pass
+class B(A): pass
+b = B() # E: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+
+[case testImplementAbstractPropertyViaProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int: pass
+b = B()
+b.x() # E: "int" not callable
+[builtins fixtures/property.pyi]
+
+[case testImplementReradWriteAbstractPropertyViaProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+b = B()
+b.x.y # E: "int" has no attribute "y"
+[builtins fixtures/property.pyi]
+
+[case testImplementAbstractPropertyViaPropertyInvalidType]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> str: pass # E
+b = B()
+b.x() # E
+[builtins fixtures/property.pyi]
+[out]
+main:7: error: Return type of "x" incompatible with supertype "A"
+main:9: error: "str" not callable
+
+[case testCantImplementAbstractPropertyViaInstanceVariable]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    def __init__(self) -> None:
+        self.x = 1 # E
+b = B() # E
+b.x.y # E
+[builtins fixtures/property.pyi]
+[out]
+main:7: error: Property "x" defined in "B" is read-only
+main:8: error: Cannot instantiate abstract class 'B' with abstract attribute 'x'
+main:9: error: "int" has no attribute "y"
+
+[case testSuperWithAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return super().x.y # E: "int" has no attribute "y"
+[builtins fixtures/property.pyi]
+[out]
+
+[case testSuperWithReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return super().x.y # E
+    @x.setter
+    def x(self, v: int) -> None:
+        super().x = '' # E
+[builtins fixtures/property.pyi]
+[out]
+main:10: error: "int" has no attribute "y"
+main:13: error: Invalid assignment target
+
+[case testOnlyImplementGetterOfReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+    @x.setter
+    def x(self, v: int) -> None: pass
+class B(A):
+    @property # E
+    def x(self) -> int: pass
+b = B()
+b.x.y # E
+[builtins fixtures/property.pyi]
+[out]
+main:8: error: Read-only property cannot override read-write property
+main:11: error: "int" has no attribute "y"
+
+[case testDynamicallyTypedReadOnlyAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+[out]
+
+[case testDynamicallyTypedReadOnlyAbstractPropertyForwardRef]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1  # E: Property "x" defined in "A" is read-only
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+[out]
+
+[case testDynamicallyTypedReadWriteAbstractProperty]
+from abc import abstractproperty, ABCMeta
+def f(a: A) -> None:
+    a.x.y
+    a.x = 1
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self): pass
+    @x.setter
+    def x(self, x): pass
+[out]
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
new file mode 100644
index 0000000..871091c
--- /dev/null
+++ b/test-data/unit/check-async-await.test
@@ -0,0 +1,393 @@
+-- Tests for async def and await (PEP 492)
+-- ---------------------------------------
+
+[case testAsyncDefPass]
+# flags: --fast-parser
+async def f() -> int:
+    pass
+[builtins fixtures/async_await.pyi]
+
+[case testAsyncDefReturn]
+# flags: --fast-parser
+async def f() -> int:
+    return 0
+reveal_type(f())  # E: Revealed type is 'typing.Awaitable[builtins.int]'
+[builtins fixtures/async_await.pyi]
+
+[case testAsyncDefMissingReturn]
+# flags: --fast-parser --warn-no-return
+async def f() -> int:
+    make_this_not_trivial = 1
+[builtins fixtures/async_await.pyi]
+[out]
+main:2: note: Missing return statement
+
+[case testAsyncDefReturnWithoutValue]
+# flags: --fast-parser
+async def f() -> int:
+    make_this_not_trivial = 1
+    return
+[builtins fixtures/async_await.pyi]
+[out]
+main:4: error: Return value expected
+
+[case testAwaitCoroutine]
+# flags: --fast-parser
+async def f() -> int:
+    x = await f()
+    reveal_type(x)  # E: Revealed type is 'builtins.int*'
+    return x
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAwaitDefaultContext]
+# flags: --fast-parser
+from typing import TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)
+    reveal_type(y)
+    return y
+[out]
+main:6: error: Revealed type is 'T`-1'
+
+[case testAwaitAnyContext]
+# flags: --fast-parser
+from typing import Any, TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)  # type: Any
+    reveal_type(y)
+    return y
+[out]
+main:6: error: Revealed type is 'Any'
+
+[case testAwaitExplicitContext]
+# flags: --fast-parser
+from typing import TypeVar
+T = TypeVar('T')
+async def f(x: T) -> T:
+    y = await f(x)  # type: int
+    reveal_type(y)
+[out]
+main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int"
+main:6: error: Revealed type is 'builtins.int'
+
+[case testAwaitGeneratorError]
+# flags: --fast-parser
+from typing import Any, Generator
+def g() -> Generator[int, None, str]:
+    yield 0
+    return ''
+async def f() -> int:
+    x = await g()
+    return x
+[out]
+main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type "Awaitable")
+
+[case testAwaitIteratorError]
+# flags: --fast-parser
+from typing import Any, Iterator
+def g() -> Iterator[Any]:
+    yield
+async def f() -> int:
+    x = await g()
+    return x
+[out]
+main:6: error: Incompatible types in await (actual type Iterator[Any], expected type "Awaitable")
+
+[case testAwaitArgumentError]
+# flags: --fast-parser
+def g() -> int:
+    return 0
+async def f() -> int:
+    x = await g()
+    return x
+[builtins fixtures/async_await.pyi]
+[out]
+main:5: error: Incompatible types in await (actual type "int", expected type "Awaitable")
+
+[case testAwaitResultError]
+# flags: --fast-parser
+async def g() -> int:
+    return 0
+async def f() -> str:
+    x = await g()  # type: str
+[builtins fixtures/async_await.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testAwaitReturnError]
+# flags: --fast-parser
+async def g() -> int:
+    return 0
+async def f() -> str:
+    x = await g()
+    return x
+[builtins fixtures/async_await.pyi]
+[out]
+main:6: error: Incompatible return value type (got "int", expected "str")
+
+[case testAsyncFor]
+# flags: --fast-parser
+from typing import AsyncIterator
+class C(AsyncIterator[int]):
+    async def __anext__(self) -> int: return 0
+async def f() -> None:
+    async for x in C():
+        reveal_type(x)  # E: Revealed type is 'builtins.int*'
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAsyncForError]
+# flags: --fast-parser
+from typing import AsyncIterator
+async def f() -> None:
+    async for x in [1]:
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+main:4: error: AsyncIterable expected
+main:4: error: List[int] has no attribute "__aiter__"
+
+[case testAsyncWith]
+# flags: --fast-parser
+class C:
+    async def __aenter__(self) -> int: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:
+        reveal_type(x)  # E: Revealed type is 'builtins.int*'
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAsyncWithError]
+# flags: --fast-parser
+class C:
+    def __enter__(self) -> int: pass
+    def __exit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"?
+main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"?
+
+[case testAsyncWithErrorBadAenter]
+# flags: --fast-parser
+class C:
+    def __aenter__(self) -> int: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:  # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type "Awaitable")
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAsyncWithErrorBadAenter2]
+# flags: --fast-parser
+class C:
+    def __aenter__(self) -> None: pass
+    async def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x:  # E: "__aenter__" of "C" does not return a value
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAsyncWithErrorBadAexit]
+# flags: --fast-parser
+class C:
+    async def __aenter__(self) -> int: pass
+    def __aexit__(self, x, y, z) -> int: pass
+async def f() -> None:
+    async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type "Awaitable")
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testAsyncWithErrorBadAexit2]
+# flags: --fast-parser
+class C:
+    async def __aenter__(self) -> int: pass
+    def __aexit__(self, x, y, z) -> None: pass
+async def f() -> None:
+    async with C() as x: # E: "__aexit__" of "C" does not return a value
+        pass
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testNoYieldInAsyncDef]
+# flags: --fast-parser
+async def f():
+    yield None
+async def g():
+    yield
+async def h():
+    x = yield
+[builtins fixtures/async_await.pyi]
+[out]
+main:3: error: 'yield' in async function
+main:5: error: 'yield' in async function
+main:7: error: 'yield' in async function
+
+[case testNoYieldFromInAsyncDef]
+# flags: --fast-parser
+async def f():
+    yield from []
+async def g():
+    x = yield from []
+[builtins fixtures/async_await.pyi]
+[out]
+main:3: error: 'yield from' in async function
+main:5: error: 'yield from' in async function
+
+[case testNoAsyncDefInPY2_python2]
+# flags: --fast-parser
+async def f():  # E: invalid syntax
+    pass
+
+[case testYieldFromNoAwaitable]
+# flags: --fast-parser
+from typing import Any, Generator
+async def f() -> str:
+    return ''
+def g() -> Generator[Any, None, str]:
+    x = yield from f()
+    return x
+[builtins fixtures/async_await.pyi]
+[out]
+main:6: error: "yield from" can't be applied to Awaitable[str]
+
+[case testAwaitableSubclass]
+# flags: --fast-parser
+from typing import Any, AsyncIterator, Awaitable, Generator
+class A(Awaitable[int]):
+    def __await__(self) -> Generator[Any, None, int]:
+        yield
+        return 0
+class C:
+    def __aenter__(self) -> A:
+        return A()
+    def __aexit__(self, *a) -> A:
+        return A()
+class I(AsyncIterator[int]):
+    def __aiter__(self) -> 'I':
+        return self
+    def __anext__(self) -> A:
+        return A()
+async def main() -> None:
+    x = await A()
+    reveal_type(x)  # E: Revealed type is 'builtins.int'
+    async with C() as y:
+        reveal_type(y)  # E: Revealed type is 'builtins.int'
+    async for z in I():
+        reveal_type(z)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/async_await.pyi]
+[out]
+
+[case testYieldTypeCheckInDecoratedCoroutine]
+# flags: --fast-parser
+from typing import Generator
+from types import coroutine
+ at coroutine
+def f() -> Generator[int, str, int]:
+    x = yield 0
+    x = yield ''  # E: Incompatible types in yield (actual type "str", expected type "int")
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+    if x:
+        return 0
+    else:
+        return ''  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/async_await.pyi]
+[out]
+
+
+-- The full matrix of coroutine compatibility
+-- ------------------------------------------
+
+[case testFullCoroutineMatrix]
+# flags: --fast-parser
+from typing import Any, AsyncIterator, Awaitable, Generator, Iterator
+from types import coroutine
+
+# The various things you might try to use in `await` or `yield from`.
+
+def plain_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+async def plain_coroutine() -> int:
+    return 1
+
+ at coroutine
+def decorated_generator() -> Generator[str, None, int]:
+    yield 'a'
+    return 1
+
+ at coroutine
+async def decorated_coroutine() -> int:
+    return 1
+
+class It(Iterator[str]):
+    def __iter__(self) -> 'It':
+        return self
+    def __next__(self) -> str:
+        return 'a'
+
+def other_iterator() -> It:
+    return It()
+
+class Aw(Awaitable[int]):
+    def __await__(self) -> Generator[str, Any, int]:
+        yield 'a'
+        return 1
+
+def other_coroutine() -> Aw:
+    return Aw()
+
+# The various contexts in which `await` or `yield from` might occur.
+
+def plain_host_generator() -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    x = yield from plain_generator()
+    x = yield from plain_coroutine()  # E: "yield from" can't be applied to Awaitable[int]
+    x = yield from decorated_generator()
+    x = yield from decorated_coroutine()  # E: "yield from" can't be applied to AwaitableGenerator[Any, Any, int, Awaitable[int]]
+    x = yield from other_iterator()
+    x = yield from other_coroutine()  # E: "yield from" can't be applied to "Aw"
+
+async def plain_host_coroutine() -> None:
+    x = 0
+    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type "Awaitable")
+    x = await plain_coroutine()
+    x = await decorated_generator()
+    x = await decorated_coroutine()
+    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type "Awaitable")
+    x = await other_coroutine()
+
+ at coroutine
+def decorated_host_generator() -> Generator[str, None, None]:
+    yield 'a'
+    x = 0
+    x = yield from plain_generator()
+    x = yield from plain_coroutine()
+    x = yield from decorated_generator()
+    x = yield from decorated_coroutine()
+    x = yield from other_iterator()
+    x = yield from other_coroutine()  # E: "yield from" can't be applied to "Aw"
+
+ at coroutine
+async def decorated_host_coroutine() -> None:
+    x = 0
+    x = await plain_generator()  # E: Incompatible types in await (actual type Generator[str, None, int], expected type "Awaitable")
+    x = await plain_coroutine()
+    x = await decorated_generator()
+    x = await decorated_coroutine()
+    x = await other_iterator()  # E: Incompatible types in await (actual type "It", expected type "Awaitable")
+    x = await other_coroutine()
+
+[builtins fixtures/async_await.pyi]
+[out]
diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test
new file mode 100644
index 0000000..05fa1a9
--- /dev/null
+++ b/test-data/unit/check-basic.test
@@ -0,0 +1,310 @@
+[case testEmptyFile]
+[out]
+
+[case testAssignmentAndVarDef]
+
+a = None # type: A
+b = None # type: B
+a = a
+a = b # Fail
+class A: pass
+class B: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testConstructionAndAssignment]
+
+x = None # type: A
+x = A()
+x = B()
+class A:
+    def __init__(self): pass
+class B:
+    def __init__(self): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testInheritInitFromObject]
+
+x = None # type: A
+x = A()
+x = B()
+class A(object): pass
+class B(object): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testImplicitInheritInitFromObject]
+
+x = None # type: A
+o = None # type: object
+x = o # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+x = A()
+o = x
+class A: pass
+class B: pass
+[out]
+
+[case testTooManyConstructorArgs]
+import typing
+object(object())
+[out]
+main:2: error: Too many arguments for "object"
+
+[case testVarDefWithInit]
+import typing
+a = A() # type: A
+b = object() # type: A
+class A: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testInheritanceBasedSubtyping]
+import typing
+x = B() # type: A
+y = A() # type: B # Fail
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testDeclaredVariableInParentheses]
+
+(x) = None # type: int
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+x = 1
+
+
+-- Simple functions and calling
+-- ----------------------------
+
+
+[case testFunction]
+import typing
+def f(x: 'A') -> None: pass
+f(A())
+f(B()) # Fail
+class A: pass
+class B: pass
+[out]
+main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+[case testNotCallable]
+import typing
+A()()
+class A: pass
+[out]
+main:2: error: "A" not callable
+
+[case testSubtypeArgument]
+import typing
+def f(x: 'A', y: 'B') -> None: pass
+f(B(), A()) # Fail
+f(B(), B())
+
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B"
+
+[case testInvalidArgumentCount]
+import typing
+def f(x, y) -> None: pass
+f(object())
+f(object(), object(), object())
+[out]
+main:3: error: Too few arguments for "f"
+main:4: error: Too many arguments for "f"
+
+
+-- Locals
+-- ------
+
+
+[case testLocalVariables]
+
+def f() -> None:
+  x = None # type: A
+  y = None # type: B
+  x = x
+  x = y # Fail
+class A: pass
+class B: pass
+[out]
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLocalVariableScope]
+
+def f() -> None:
+  x = None # type: A
+  x = A()
+def g() -> None:
+  x = None # type: B
+  x = A() # Fail
+class A: pass
+class B: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testFunctionArguments]
+import typing
+def f(x: 'A', y: 'B') -> None:
+  x = y # Fail
+  x = x
+  y = B()
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLocalVariableInitialization]
+import typing
+def f() -> None:
+  a = A() # type: A
+  b = B() # type: A # Fail
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testVariableInitializationWithSubtype]
+import typing
+x = B() # type: A
+y = A() # type: B # Fail
+class A: pass
+class B(A): pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+
+-- Misc
+-- ----
+
+
+[case testInvalidReturn]
+import typing
+def f() -> 'A':
+  return B()
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible return value type (got "B", expected "A")
+
+[case testTopLevelContextAndInvalidReturn]
+import typing
+def f() -> 'A':
+  return B()
+a = B() # type: A
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible return value type (got "B", expected "A")
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testEmptyReturnInAnyTypedFunction]
+from typing import Any
+def f() -> Any:
+  return
+
+[case testEmptyYieldInAnyTypedFunction]
+from typing import Any
+def f() -> Any:
+  yield
+
+[case testModule__name__]
+import typing
+x = __name__ # type: str
+a = __name__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case testModule__doc__]
+import typing
+x = __doc__ # type: str
+a = __doc__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case testModule__file__]
+import typing
+x = __file__ # type: str
+a = __file__ # type: A  # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+class A: pass
+[builtins fixtures/primitives.pyi]
+
+[case test__package__]
+import typing
+x = __package__ # type: str
+a = __file__ # type: int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+-- Scoping and shadowing
+-- ---------------------
+
+
+[case testLocalVariableShadowing]
+
+a = None # type: A
+a = B()       # Fail
+a = A()
+def f() -> None:
+  a = None # type: B
+  a = A()     # Fail
+  a = B()
+a = B()       # Fail
+a = A()
+
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testGlobalDefinedInBlockWithType]
+
+class A: pass
+while A:
+    a = None # type: A
+    a = A()
+    a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+
+-- # type: signatures
+-- ------------------
+
+
+[case testFunctionSignatureAsComment]
+def f(x): # type: (int) -> str
+    return 1
+f('')
+[out]
+main:2: error: Incompatible return value type (got "int", expected "str")
+main:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testMethodSignatureAsComment]
+class A:
+    def f(self, x):
+        # type: (int) -> str
+        self.f('') # Fail
+        return 1
+A().f('') # Fail
+[out]
+main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+main:5: error: Incompatible return value type (got "int", expected "str")
+main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testTrailingCommaParsing-skip]
+x = 1
+x in 1,
+if x in 1, :
+    pass
+[out]
+
+[case testInitReturnTypeError]
+class C:
+    def __init__(self):
+        # type: () -> int
+        pass
+[out]
+main:2: error: The return type of "__init__" must be None
diff --git a/test-data/unit/check-bound.test b/test-data/unit/check-bound.test
new file mode 100644
index 0000000..ee935ae
--- /dev/null
+++ b/test-data/unit/check-bound.test
@@ -0,0 +1,203 @@
+-- Enforcement of upper bounds
+-- ---------------------------
+
+
+[case testBoundOnGenericFunction]
+from typing import TypeVar
+
+class A: pass
+class B(A): pass
+class C(A): pass
+class D: pass
+
+T = TypeVar('T', bound=A)
+U = TypeVar('U')
+def f(x: T) -> T: pass
+def g(x: U) -> U:
+    return f(x) # Fail
+
+f(A())
+f(B())
+f(D()) # Fail
+
+b = B()
+b = f(b)
+b = f(C()) # Fail
+[out]
+main:12: error: Type argument 1 of "f" has incompatible value "U"
+main:16: error: Type argument 1 of "f" has incompatible value "D"
+main:20: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+
+[case testBoundOnGenericClass]
+from typing import TypeVar, Generic
+
+class A: pass
+class B(A): pass
+T = TypeVar('T', bound=A)
+
+class G(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+
+v = None # type: G[A]
+w = None # type: G[B]
+x = None # type: G[str] # E: Type argument "builtins.str" of "G" must be a subtype of "__main__.A"
+y = G('a') # E: Type argument 1 of "G" has incompatible value "str"
+z = G(A())
+z = G(B())
+
+
+[case testBoundVoid]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]):
+    t = None # type: T
+    def get(self) -> T:
+        return self.t
+c1 = None # type: C[None]
+c1.get()
+d = c1.get() # E: Function does not return a value
+
+
+[case testBoundAny]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+def f(x: T) -> T:
+    return x
+
+def g(): pass
+
+f(g())
+C(g())
+z = None # type: C
+
+
+[case testBoundHigherOrderWithVoid]
+from typing import TypeVar, Callable
+class A: pass
+T = TypeVar('T', bound=A)
+def f(g: Callable[[], T]) -> T:
+    return g()
+def h() -> None: pass
+f(h)
+a = f(h) # E: "h" does not return a value
+
+
+[case testBoundInheritance]
+from typing import TypeVar, Generic
+class A: pass
+T = TypeVar('T')
+TA = TypeVar('TA', bound=A)
+
+class C(Generic[TA]): pass
+class D0(C[TA], Generic[TA]): pass
+class D1(C[T], Generic[T]): pass # E: Type argument "T`1" of "C" must be a subtype of "__main__.A"
+class D2(C[A]): pass
+class D3(C[str]): pass # E: Type argument "builtins.str" of "C" must be a subtype of "__main__.A"
+
+
+-- Using information from upper bounds
+-- -----------------------------------
+
+
+[case testBoundGenericFunctions]
+from typing import TypeVar
+class A: pass
+class B(A): pass
+
+T = TypeVar('T')
+TA = TypeVar('TA', bound=A)
+TB = TypeVar('TB', bound=B)
+
+def f(x: T) -> T:
+    return x
+def g(x: TA) -> TA:
+    return f(x)
+def h(x: TB) -> TB:
+    return g(x)
+def g2(x: TA) -> TA:
+    return h(x) # Fail
+
+def j(x: TA) -> A:
+    return x
+def k(x: TA) -> B:
+    return x # Fail
+[out]
+main:16: error: Type argument 1 of "h" has incompatible value "TA"
+main:21: error: Incompatible return value type (got "TA", expected "B")
+
+
+[case testBoundMethodUsage]
+from typing import TypeVar
+class A0:
+    def foo(self) -> None: pass
+class A(A0):
+    def bar(self) -> None: pass
+    a = 1
+    @property
+    def b(self) -> int:
+        return self.a
+class B(A):
+    def baz(self) -> None: pass
+
+T = TypeVar('T', A)
+
+def f(x: T) -> T:
+    x.foo()
+    x.bar()
+    x.baz()  # E: "A" has no attribute "baz"
+    x.a
+    x.b
+    return x
+
+b = f(B())
+[builtins fixtures/property.pyi]
+[out]
+
+[case testBoundClassMethod]
+from typing import TypeVar
+class A0:
+    @classmethod
+    def foo(cls, x: int) -> int: pass
+class A(A0): pass
+
+T = TypeVar('T', bound=A)
+def f(x: T) -> int:
+    return x.foo(22)
+[builtins fixtures/classmethod.pyi]
+
+
+[case testBoundStaticMethod]
+from typing import TypeVar
+class A0:
+    @staticmethod
+    def foo(x: int) -> int: pass
+class A(A0): pass
+
+T = TypeVar('T', bound=A)
+def f(x: T) -> int:
+    return x.foo(22)
+[builtins fixtures/staticmethod.pyi]
+
+
+[case testBoundOnDecorator]
+from typing import TypeVar, Callable, Any, cast
+T = TypeVar('T', bound=Callable[..., Any])
+
+def twice(f: T) -> T:
+    def result(*args, **kwargs) -> Any:
+        f(*args, **kwargs)
+        return f(*args, **kwargs)
+    return cast(T, result)
+
+ at twice
+def foo(x: int) -> int:
+    return x
+
+a = 1
+b = foo(a)
+b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+twice(a) # E: Type argument 1 of "twice" has incompatible value "int"
+[builtins fixtures/args.pyi]
diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test
new file mode 100644
index 0000000..429ad44
--- /dev/null
+++ b/test-data/unit/check-callable.test
@@ -0,0 +1,345 @@
+[case testCallableDef]
+def f() -> None: pass
+
+if callable(f):
+    f()
+else:
+    f += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableLambda]
+f = lambda: None
+
+if callable(f):
+    f()
+else:
+    f += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableNotCallable]
+x = 5
+
+if callable(x):
+    x()
+else:
+    x += 5
+
+[builtins fixtures/callable.pyi]
+
+[case testUnion]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x):
+    y = x() + 'test'
+else:
+    z = x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testUnionMultipleReturnTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], str], Callable[[], int]]
+
+if callable(x):
+    y = x() + 2 # E: Unsupported operand types for + (likely involving Union)
+else:
+    z = x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testUnionMultipleNonCallableTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str]]
+
+if callable(x):
+    y = x() + 'test'
+else:
+    z = x + 6  # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableThenIsinstance]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str], Callable[[], int]]
+
+if callable(x):
+    y = x()
+    if isinstance(y, int):
+        b1 = y + 2
+    else:
+        b2 = y + 'test'
+else:
+    if isinstance(x, int):
+        b3 = x + 3
+    else:
+        b4 = x + 'test2'
+
+[builtins fixtures/callable.pyi]
+
+[case testIsinstanceThenCallable]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, str, Callable[[], str], Callable[[], int]]
+
+if isinstance(x, int):
+    b1 = x + 1
+else:
+    if callable(x):
+        y = x()
+        if isinstance(y, int):
+            b2 = y + 1
+        else:
+            b3 = y + 'test'
+    else:
+        b4 = x + 'test2'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableWithDifferentArgTypes]
+from typing import Callable, Union
+
+x = 5  # type: Union[int, Callable[[], None], Callable[[int], None]]
+
+if callable(x):
+    x()  # E: Too few arguments
+
+[builtins fixtures/callable.pyi]
+
+[case testClassInitializer]
+from typing import Callable, Union
+
+class A:
+    x = 5
+
+a = A  # type: Union[A, Callable[[], A]]
+
+if callable(a):
+    a = a()
+
+a.x + 6
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableVariables]
+from typing import Union
+
+class A:
+    x = 5
+
+class B:
+    x = int
+
+x = A()  # type: Union[A, B]
+
+if callable(x.x):
+    y = x.x()
+else:
+    y = x.x + 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableAnd]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) and x() == 'test':
+    x()
+else:
+    x + 5  # E: Unsupported left operand type for + (some union)
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableOr]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) or x() == 'test':  # E: "int" not callable
+    x()  # E: "int" not callable
+else:
+    x + 5
+[builtins fixtures/callable.pyi]
+
+[case testCallableOrOtherType]
+from typing import Union, Callable
+
+x = 5  # type: Union[int, Callable[[], str]]
+
+if callable(x) or x == 2:
+    pass
+else:
+    pass
+[builtins fixtures/callable.pyi]
+
+[case testAnyCallable]
+from typing import Any
+
+x = 5  # type: Any
+
+if callable(x):
+    reveal_type(x)  # E: Revealed type is 'Any'
+else:
+    reveal_type(x)  # E: Revealed type is 'Any'
+[builtins fixtures/callable.pyi]
+
+[case testCallableCallableClasses]
+from typing import Union
+
+
+class A:
+    pass
+
+
+class B:
+    def __call__(self) -> None:
+        pass
+
+
+a = A()  # type: A
+b = B()  # type: B
+c = A()  # type: Union[A, B]
+
+if callable(a):
+    5 + 'test'
+
+if not callable(b):
+    5 + 'test'
+
+if callable(c):
+    reveal_type(c)  # E: Revealed type is '__main__.B'
+else:
+    reveal_type(c)  # E: Revealed type is '__main__.A'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableNestedUnions]
+from typing import Callable, Union
+
+T = Union[Union[int, Callable[[], int]], Union[str, Callable[[], str]]]
+
+def f(t: T) -> None:
+    if callable(t):
+        reveal_type(t())  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+    else:
+        reveal_type(t)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarEmpty]
+from typing import TypeVar
+
+T = TypeVar('T')
+
+def f(t: T) -> T:
+    if callable(t):
+        return 5
+    else:
+        return t
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarUnion]
+from typing import Callable, TypeVar, Union
+
+T = TypeVar('T', int, Callable[[], int], Union[str, Callable[[], str]])
+
+def f(t: T) -> None:
+    if callable(t):
+        reveal_type(t())  # E: Revealed type is 'builtins.int'  # E: Revealed type is 'builtins.str'
+    else:
+        reveal_type(t)  # E: Revealed type is 'builtins.int*'  # E: Revealed type is 'builtins.str'
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeVarBound]
+from typing import TypeVar
+
+
+class A:
+    def __call__(self) -> str:
+        return 'hi'
+
+
+T = TypeVar('T', bound=A)
+
+def f(t: T) -> str:
+    if callable(t):
+        return t()
+    else:
+        return 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeType]
+from typing import Type
+
+
+class A:
+    pass
+
+
+T = Type[A]
+
+def f(t: T) -> A:
+    if callable(t):
+        return t()
+    else:
+        return 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableTypeUnion]
+from abc import ABCMeta, abstractmethod
+from typing import Type, Union
+
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None:
+        pass
+
+
+class B:
+    pass
+
+
+x = B  # type: Union[Type[A], Type[B]]
+if callable(x):
+    # Abstract classes raise an error when called, but are indeed `callable`
+    pass
+else:
+    'test' + 5
+
+[builtins fixtures/callable.pyi]
+
+[case testCallableUnionOfTypes]
+from abc import ABCMeta, abstractmethod
+from typing import Type, Union
+
+
+class A(metaclass=ABCMeta):
+    @abstractmethod
+    def f(self) -> None:
+        pass
+
+
+class B:
+    pass
+
+
+x = B  # type: Type[Union[A, B]]
+if callable(x):
+    # Abstract classes raise an error when called, but are indeed `callable`
+    pass
+else:
+    'test' + 5
+
+[builtins fixtures/callable.pyi]
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
new file mode 100644
index 0000000..a18532d
--- /dev/null
+++ b/test-data/unit/check-class-namedtuple.test
@@ -0,0 +1,378 @@
+[case testNewNamedTupleOldPythonVersion]
+# flags: --fast-parser --python-version 3.5
+from typing import NamedTuple
+
+class E(NamedTuple):  # E: NamedTuple class syntax is only supported in Python 3.6
+    pass
+
+[case testNewNamedTupleNoUnderscoreFields]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    _y: int  # E: NamedTuple field name cannot start with an underscore: _y
+    _z: int  # E: NamedTuple field name cannot start with an underscore: _z
+
+[case testNewNamedTupleAccessingAttributes]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x: X
+x.x
+x.y
+x.z # E: "X" has no attribute "z"
+
+[case testNewNamedTupleAttributesAreReadOnly]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+
+x: X
+x.x = 5 # E: Property "x" defined in "X" is read-only
+x.y = 5 # E: "X" has no attribute "y"
+
+class A(X): pass
+a: A
+a.x = 5 # E: Property "x" defined in "A" is read-only
+
+[case testNewNamedTupleCreateWithPositionalArguments]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(1, '2')
+x.x
+x.z      # E: "X" has no attribute "z"
+x = X(1) # E: Too few arguments for "X"
+x = X(1, '2', 3)  # E: Too many arguments for "X"
+
+[case testNewNamedTupleShouldBeSingleBase]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A: ...
+class X(NamedTuple, A):  # E: NamedTuple should be a single base
+    pass
+
+[case testCreateNewNamedTupleWithKeywordArguments]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(x=1, y='x')
+x = X(1, y='x')
+x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
+x = X(y='x') # E: Missing positional argument "x" in call to "X"
+
+[case testNewNamedTupleCreateAndUseAsTuple]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x = X(1, 'x')
+a, b = x
+a, b, c = x  # E: Need more than 2 values to unpack (3 expected)
+
+[case testNewNamedTupleWithItemTypes]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+n = N(1, 'x')
+s: str = n.a  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i: int = n.b  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testNewNamedTupleConstructorArgumentTypes]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
+n = N(1, b=2)   # E: Argument 2 to "N" has incompatible type "int"; expected "str"
+N(1, 'x')
+N(b='x', a=1)
+
+[case testNewNamedTupleAsBaseClass]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class N(NamedTuple):
+    a: int
+    b: str
+
+class X(N):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testNewNamedTupleSelfTypeWithNamedTupleAsBase]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A):
+    def f(self, x: int) -> None:
+        self.f(self.a)
+        self.f(self.b)  # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
+        i = 0
+        s = ''
+        i, s = self
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+[out]
+
+[case testNewNamedTupleTypeReferenceToClassDerivedFrom]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A):
+    def f(self, x: 'B') -> None:
+        i = 0
+        s = ''
+        self = x
+        i, s = x
+        i, s = x.a, x.b
+        i, s = x.a, x.a  # E: Incompatible types in assignment (expression has type "int", \
+                              variable has type "str")
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+[out]
+
+[case testNewNamedTupleSubtyping]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple, Tuple
+
+class A(NamedTuple):
+    a: int
+    b: str
+
+class B(A): pass
+a = A(1, '')
+b = B(1, '')
+t: Tuple[int, str]
+b = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
+b = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
+t = a
+t = (1, '')
+t = b
+a = b
+
+[case testNewNamedTupleSimpleTypeInference]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple, Tuple
+
+class A(NamedTuple):
+    a: int
+
+l = [A(1), A(2)]
+a = A(1)
+a = l[0]
+(i,) = l[0]
+i, i = l[0]  # E: Need more than 1 value to unpack (2 expected)
+l = [A(1)]
+a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]", \
+               variable has type "A")
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleMissingClassAttribute]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class MyNamedTuple(NamedTuple):
+    a: int
+    b: str
+
+MyNamedTuple.x # E: "MyNamedTuple" has no attribute "x"
+
+[case testNewNamedTupleEmptyItems]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    ...
+
+[case testNewNamedTupleForwardRef]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    b: 'B'
+
+class B: ...
+
+a = A(B())
+a = A(1)  # E: Argument 1 to "A" has incompatible type "int"; expected "B"
+
+[case testNewNamedTupleProperty]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class A(NamedTuple):
+    a: int
+
+class B(A):
+    @property
+    def b(self) -> int:
+        return self.a
+class C(B): pass
+B(1).b
+C(2).b
+
+[builtins fixtures/property.pyi]
+
+[case testNewNamedTupleAsDict]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple, Any
+
+class X(NamedTuple):
+    x: Any
+    y: Any
+
+x: X
+reveal_type(x._asdict())  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNewNamedTupleReplaceTyped]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+x: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+x._replace(x=5)
+x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+
+[case testNewNamedTupleFields]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+reveal_type(X._fields)  # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+
+[case testNewNamedTupleUnit]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    pass
+
+x: X = X()
+x._replace()
+x._fields[0]  # E: Tuple index out of range
+
+[case testNewNamedTupleJoinNamedTuple]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+class Y(NamedTuple):
+    x: int
+    y: str
+
+reveal_type([X(3, 'b'), Y(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleJoinTuple]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y: str
+
+reveal_type([(3, 'b'), X(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+reveal_type([X(1, 'a'), (3, 'b')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWithTooManyArguments]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y = z = 2  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
+    def f(self): pass  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
+
+[case testNewNamedTupleWithInvalidItems2]
+# flags: --fast-parser --python-version 3.6
+import typing
+
+class X(typing.NamedTuple):
+    x: int
+    y: str = 'y'  # E: Right hand side values are not supported in NamedTuple
+    z = None  # type: int # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
+    x[0]: int  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
+
+[builtins fixtures/list.pyi]
+
+[case testNewNamedTupleWithoutTypesSpecified]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple
+
+class X(NamedTuple):
+    x: int
+    y = 2  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type"
+
+[case testTypeUsingTypeCNamedTuple]
+# flags: --fast-parser --python-version 3.6
+from typing import NamedTuple, Type
+
+class N(NamedTuple):
+    x: int
+    y: str
+
+def f(a: Type[N]):
+    a()
+[builtins fixtures/list.pyi]
+[out]
+main:8: error: Unsupported type Type["N"]
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
new file mode 100644
index 0000000..9e46ef9
--- /dev/null
+++ b/test-data/unit/check-classes.test
@@ -0,0 +1,2761 @@
+-- Methods
+-- -------
+
+
+[case testMethodCall]
+
+a = None # type: A
+b = None # type: B
+
+a.foo(B())        # Fail
+a.bar(B(), A())   # Fail
+
+a.foo(A())
+b.bar(B(), A())
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+class B:
+    def bar(self, x: 'B', y: A) -> None: pass
+[out]
+main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A"
+main:6: error: "A" has no attribute "bar"
+
+[case testMethodCallWithSubtype]
+
+a = None # type: A
+a.foo(A())
+a.foo(B())
+a.bar(A()) # Fail
+a.bar(B())
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+    def bar(self, x: 'B') -> None: pass
+class B(A): pass
+[out]
+main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B"
+
+[case testInheritingMethod]
+
+a = None # type: B
+a.foo(A()) # Fail
+a.foo(B())
+
+class A:
+    def foo(self, x: 'B') -> None: pass
+class B(A): pass
+[out]
+main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B"
+
+[case testMethodCallWithInvalidNumberOfArguments]
+
+a = None # type: A
+a.foo()               # Fail
+a.foo(object(), A())  # Fail
+
+class A:
+    def foo(self, x: 'A') -> None: pass
+[out]
+main:3: error: Too few arguments for "foo" of "A"
+main:4: error: Too many arguments for "foo" of "A"
+main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A"
+
+[case testMethodBody]
+import typing
+class A:
+    def f(self) -> None:
+        a = object() # type: A    # Fail
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testMethodArguments]
+import typing
+class A:
+    def f(self, a: 'A', b: 'B') -> None:
+        a = B() # Fail
+        b = A() # Fail
+        a = A()
+        b = B()
+        a = a
+        a = b # Fail
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testReturnFromMethod]
+import typing
+class A:
+    def f(self) -> 'A':
+        return B() # Fail
+        return A()
+class B: pass
+[out]
+main:4: error: Incompatible return value type (got "B", expected "A")
+
+[case testSelfArgument]
+import typing
+class A:
+    def f(self) -> None:
+        o = self # type: B    # Fail
+        self.g()      # Fail
+        a = self # type: A
+        self.f()
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: "A" has no attribute "g"
+
+[case testAssignToMethodViaInstance]
+import typing
+class A:
+    def f(self): pass
+A().f = None # E: Cannot assign to a method
+
+
+-- Attributes
+-- ----------
+
+
+[case testReferToInvalidAttribute]
+
+class A:
+    def __init__(self):
+        self.x = object()
+a = None # type: A
+a.y
+a.y = object()
+a.x
+a.x = object()
+[out]
+main:6: error: "A" has no attribute "y"
+main:7: error: "A" has no attribute "y"
+
+[case testArgumentTypeInference]
+
+class A:
+    def __init__(self, aa: 'A', bb: 'B') -> None:
+        self.a = aa
+        self.b = bb
+class B: pass
+a = None # type: A
+b = None # type: B
+a.a = b # Fail
+a.b = a # Fail
+b.a     # Fail
+a.a = a
+a.b = b
+[out]
+main:9: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:10: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:11: error: "B" has no attribute "a"
+
+[case testExplicitAttributeInBody]
+
+a = None # type: A
+a.x = object() # Fail
+a.x = A()
+class A:
+  x = None # type: A
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testAttributeDefinedInNonInitMethod]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+        self.y = ''
+        self.x = 1
+a = A()
+a.x = 1
+a.y = ''
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+a.z = 0  # E: "A" has no attribute "z"
+
+[case testInheritanceAndAttributeAssignment]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 0
+class B(A):
+    def f(self) -> None:
+        self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testAssignmentToAttributeInMultipleMethods]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 0
+    def g(self) -> None:
+        self.x = '' # Fail
+    def __init__(self) -> None:
+        self.x = '' # Fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:8: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+-- Method overriding
+-- -----------------
+
+
+[case testMethodOverridingWithIdenticalSignature]
+import typing
+class A:
+    def f(self, x: 'A') -> None: pass
+    def g(self, x: 'B' , y: object) -> 'A': pass
+    def h(self) -> None: pass
+class B(A):
+    def f(self, x: A) -> None: pass
+    def g(self, x: 'B' , y: object) -> A: pass
+    def h(self) -> None: pass
+[out]
+
+[case testMethodOverridingWithCovariantType]
+import typing
+class A:
+  def f(self, x: 'A', y: 'B') -> 'A': pass
+  def g(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+  def f(self, x: A, y: 'B') -> 'B': pass
+  def g(self, x: A, y: A) -> 'A': pass
+[out]
+
+[case testMethodOverridingWithIncompatibleTypes]
+import typing
+class A:
+  def f(self, x: 'A', y: 'B') -> 'A': pass
+  def g(self, x: 'A', y: 'B') -> 'A': pass
+  def h(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+  def f(self, x: 'B', y: 'B') -> A: pass  # Fail
+  def g(self, x: A, y: A) -> A: pass
+  def h(self, x: A, y: 'B') -> object: pass  # Fail
+[out]
+main:7: error: Argument 1 of "f" incompatible with supertype "A"
+main:9: error: Return type of "h" incompatible with supertype "A"
+
+[case testMethodOverridingWithIncompatibleArgumentCount]
+import typing
+class A:
+    def f(self, x: 'A') -> None: pass
+    def g(self, x: 'A', y: 'B') -> 'A': pass
+class B(A):
+    def f(self, x: A, y: A) -> None: pass # Fail
+    def g(self, x: A) -> A: pass # Fail
+[out]
+main:6: error: Signature of "f" incompatible with supertype "A"
+main:7: error: Signature of "g" incompatible with supertype "A"
+
+[case testMethodOverridingAcrossDeepInheritanceHierarchy1]
+import typing
+class A:
+    def f(self, x: 'B') -> None: pass
+class B(A): pass
+class C(B): # with gap in implementations
+    def f(self, x: 'C') -> None:  # Fail
+        pass
+[out]
+main:6: error: Argument 1 of "f" incompatible with supertype "A"
+
+[case testMethodOverridingAcrossDeepInheritanceHierarchy2]
+import typing
+class A:
+    def f(self) -> 'B': pass
+class B(A):
+    def f(self) -> 'C': pass
+class C(B): # with multiple implementations
+    def f(self) -> B:  # Fail
+        pass
+[out]
+main:7: error: Return type of "f" incompatible with supertype "B"
+
+[case testMethodOverridingWithVoidReturnValue]
+import typing
+class A:
+    def f(self) -> None: pass
+    def g(self) -> 'A': pass
+class B(A):
+    def f(self) -> A: pass  # Fail
+    def g(self) -> None: pass  # Fail
+[out]
+main:6: error: Return type of "f" incompatible with supertype "A"
+main:7: error: Return type of "g" incompatible with supertype "A"
+
+[case testOverride__new__WithDifferentSignature]
+class A:
+    def __new__(cls, x: int) -> str:
+        return ''
+
+class B(A):
+    def __new__(cls) -> int:
+        return 1
+
+[case testInnerFunctionNotOverriding]
+class A:
+    def f(self) -> int: pass
+
+class B(A):
+    def g(self) -> None:
+        def f(self) -> str: pass
+
+
+-- Constructors
+-- ------------
+
+
+[case testTrivialConstructor]
+import typing
+a = A() # type: A
+b = A() # type: B # Fail
+class A:
+    def __init__(self) -> None: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testConstructor]
+import typing
+a = A(B()) # type: A
+aa = A(object()) # type: A  # Fail
+b = A(B()) # type: B       # Fail
+class A:
+    def __init__(self, x: 'B') -> None: pass
+class B: pass
+[out]
+main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B"
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testConstructorWithTwoArguments]
+import typing
+a = A(C(), B()) # type: A  # Fail
+
+class A:
+    def __init__(self, x: 'B', y: 'C') -> None: pass
+class B: pass
+class C(B): pass
+[out]
+main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C"
+
+[case testInheritedConstructor]
+import typing
+b = B(C()) # type: B
+a = B(D()) # type: A # Fail
+class A:
+    def __init__(self, x: 'C') -> None: pass
+class B(A): pass
+class C: pass
+class D: pass
+[out]
+main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C"
+
+[case testOverridingWithIncompatibleConstructor]
+import typing
+A()    # Fail
+B(C()) # Fail
+A(C())
+B()
+class A:
+    def __init__(self, x: 'C') -> None: pass
+class B(A):
+    def __init__(self) -> None: pass
+class C: pass
+[out]
+main:2: error: Too few arguments for "A"
+main:3: error: Too many arguments for "B"
+
+[case testConstructorWithReturnValueType]
+import typing
+class A:
+    def __init__(self) -> 'A': pass
+[out]
+main:3: error: The return type of "__init__" must be None
+
+[case testConstructorWithImplicitReturnValueType]
+import typing
+class A:
+    def __init__(self, x: int): pass
+[out]
+main:3: error: The return type of "__init__" must be None
+
+[case testInitSubclassWithReturnValueType]
+import typing
+class A:
+    def __init_subclass__(cls) -> 'A': pass
+[out]
+main:3: error: The return type of "__init_subclass__" must be None
+
+[case testInitSubclassWithImplicitReturnValueType]
+import typing
+class A:
+    def __init_subclass__(cls, x: int=1): pass
+[out]
+main:3: error: The return type of "__init_subclass__" must be None
+
+[case testGlobalFunctionInitWithReturnType]
+import typing
+a = __init__() # type: A
+b = __init__() # type: B # Fail
+def __init__() -> 'A': pass
+class A: pass
+class B: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testAccessingInit]
+from typing import Any, cast
+class A:
+    def __init__(self, a: 'A') -> None: pass
+a = None # type: A
+a.__init__(a)  # E: Cannot access "__init__" directly
+(cast(Any, a)).__init__(a)
+
+[case testDeepInheritanceHierarchy]
+import typing
+d = C() # type: D  # Fail
+d = B()      # Fail
+d = A()      # Fail
+d = D2()     # Fail
+a = D() # type: A
+a = D2()
+b = D() # type: B
+b = D2()
+
+class A: pass
+class B(A): pass
+class C(B): pass
+class D(C): pass
+class D2(C): pass
+[out]
+main:2: error: Incompatible types in assignment (expression has type "C", variable has type "D")
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "D")
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "D")
+main:5: error: Incompatible types in assignment (expression has type "D2", variable has type "D")
+
+
+-- Attribute access in class body
+-- ------------------------------
+
+
+[case testDataAttributeRefInClassBody]
+import typing
+class B: pass
+class A:
+    x = B()
+    y = x
+    b = x # type: B
+    b = x
+    c = x # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    c = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[out]
+
+[case testMethodRefInClassBody]
+from typing import Callable
+class B: pass
+class A:
+    def f(self) -> None: pass
+    g = f
+    h = f # type: Callable[[A], None]
+    h = f
+    g = h
+    ff = f # type: Callable[[B], None]  # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[B], None])
+    g = ff                # E: Incompatible types in assignment (expression has type Callable[[B], None], variable has type Callable[[A], None])
+[out]
+
+
+-- Arbitrary statements in class body
+-- ----------------------------------
+
+
+[case testStatementsInClassBody]
+import typing
+class B: pass
+class A:
+    for x in [A()]:
+        y = x
+        y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    x = A()
+    y = A()
+    x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Class attributes
+-- ----------------
+
+
+[case testAccessMethodViaClass]
+import typing
+class A:
+    def f(self) -> None: pass
+A.f(A())
+A.f(object())     # E: Argument 1 to "f" of "A" has incompatible type "object"; expected "A"
+A.f()             # E: Too few arguments for "f" of "A"
+A.f(None, None)   # E: Too many arguments for "f" of "A"
+
+[case testAccessAttributeViaClass]
+import typing
+class B: pass
+class A:
+    x = None # type: A
+a = A.x # type: A
+b = A.x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testAccessingUndefinedAttributeViaClass]
+import typing
+class A: pass
+A.x # E: "A" has no attribute "x"
+
+[case testAccessingUndefinedAttributeViaClassWithOverloadedInit]
+from typing import overload
+class A:
+    @overload
+    def __init__(self): pass
+    @overload
+    def __init__(self, x): pass
+A.x # E: "A" has no attribute "x"
+
+[case testAccessMethodOfClassWithOverloadedInit]
+from typing import overload, Any
+class A:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Any) -> None: pass
+    def f(self) -> None: pass
+A.f(A())
+A.f()    # E: Too few arguments for "f" of "A"
+
+[case testAssignmentToClassDataAttribute]
+import typing
+class B: pass
+class A:
+    x = None # type: B
+A.x = B()
+A.x = object()  # E: Incompatible types in assignment (expression has type "object", variable has type "B")
+
+[case testAssignmentToInferredClassDataAttribute]
+import typing
+class B: pass
+class A:
+     x = B()
+A.x = B()
+A.x = A()   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInitMethodUnbound]
+
+class B: pass
+class A:
+    def __init__(self, b: B) -> None: pass
+a = None # type: A
+b = None # type: B
+A.__init__(a, b)
+A.__init__(b, b) # E: Argument 1 to "__init__" of "A" has incompatible type "B"; expected "A"
+A.__init__(a, a) # E: Argument 2 to "__init__" of "A" has incompatible type "A"; expected "B"
+
+[case testAssignToMethodViaClass]
+import typing
+class A:
+    def f(self): pass
+A.f = None # E: Cannot assign to a method
+
+[case testAssignToNestedClassViaClass]
+import typing
+class A:
+    class B: pass
+A.B = None # E: Cannot assign to a type
+
+[case testAccessingClassAttributeWithTypeInferenceIssue]
+x = C.x # E: Cannot determine type of 'x'
+def f() -> int: return 1
+class C:
+    x = f()
+[builtins fixtures/list.pyi]
+
+[case testAccessingClassAttributeWithTypeInferenceIssue2]
+class C:
+    x = []
+x = C.x
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: Need type annotation for variable
+
+
+-- Nested classes
+-- --------------
+
+
+[case testClassWithinFunction]
+
+def f() -> None:
+    class A:
+        def g(self) -> None: pass
+    a = None # type: A
+    a.g()
+    a.g(a) # E: Too many arguments for "g" of "A"
+[out]
+
+[case testConstructNestedClass]
+import typing
+class A:
+    class B: pass
+    b = B()
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    b = B(b) # E: Too many arguments for "B"
+[out]
+
+[case testConstructNestedClassWithCustomInit]
+import typing
+class A:
+    def f(self) -> None:
+        class B:
+            def __init__(self, a: 'A') -> None: pass
+        b = B(A())
+        b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        b = B() # E: Too few arguments for "B"
+[out]
+
+[case testDeclareVariableWithNestedClassType]
+
+def f() -> None:
+    class A: pass
+    a = None # type: A
+    a = A()
+    a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+[out]
+
+[case testExternalReferenceToClassWithinClass]
+
+class A:
+    class B: pass
+b = None # type: A.B
+b = A.B()
+b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = A.B(b) # E: Too many arguments for "B"
+
+
+-- Declaring attribute type in method
+-- ----------------------------------
+
+
+[case testDeclareAttributeTypeInInit]
+
+class A:
+    def __init__(self):
+        self.x = None # type: int
+a = None # type: A
+a.x = 1
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testAccessAttributeDeclaredInInitBeforeDeclaration]
+
+a = None # type: A
+a.x = 1
+a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+class A:
+    def __init__(self):
+        self.x = None # type: int
+
+
+-- Special cases
+-- -------------
+
+
+[case testMultipleClassDefinition]
+import typing
+A()
+class A: pass
+class A: pass
+[out]
+main:4: error: Name 'A' already defined
+
+[case testDocstringInClass]
+import typing
+class A:
+    """Foo"""
+class B:
+    'x'
+    y = B()
+[builtins fixtures/primitives.pyi]
+
+[case testErrorMessageInFunctionNestedWithinMethod]
+import typing
+class A:
+    def f(self) -> None:
+        def g() -> None:
+            a = None
+        b = None
+[out]
+main:5: error: Need type annotation for variable
+main:6: error: Need type annotation for variable
+
+
+-- Static methods
+-- --------------
+
+
+[case testSimpleStaticMethod]
+import typing
+class A:
+  @staticmethod
+  def f(x: int) -> None: pass
+A.f(1)
+A().f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins fixtures/staticmethod.pyi]
+
+[case testBuiltinStaticMethod]
+import typing
+int.from_bytes(b'', '')
+int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
+[builtins fixtures/staticmethod.pyi]
+
+[case testAssignStaticMethodOnInstance]
+import typing
+class A:
+  @staticmethod
+  def f(x: int) -> None: pass
+A().f = A.f # E: Cannot assign to a method
+[builtins fixtures/staticmethod.pyi]
+
+
+-- Class methods
+-- -------------
+
+
+[case testSimpleClassMethod]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A.f(1)
+A().f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins fixtures/classmethod.pyi]
+
+[case testBuiltinClassMethod]
+import typing
+int.from_bytes(b'', '')
+int.from_bytes('', '') # E: Argument 1 to "from_bytes" of "int" has incompatible type "str"; expected "bytes"
+[builtins fixtures/classmethod.pyi]
+
+[case testAssignClassMethodOnClass]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A.f = A.f # E: Cannot assign to a method
+[builtins fixtures/classmethod.pyi]
+
+[case testAssignClassMethodOnInstance]
+import typing
+class A:
+  @classmethod
+  def f(cls, x: int) -> None: pass
+A().f = A.f # E: Cannot assign to a method
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodCalledInClassMethod]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+  @classmethod
+  def bar(cls) -> None:
+    cls()
+    cls(1)      # E: Too many arguments for "C"
+    cls.bar()
+    cls.bar(1)  # E: Too many arguments for "bar" of "C"
+    cls.bozo()  # E: "C" has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testClassMethodCalledOnClass]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+C.foo()
+C.foo(1)  # E: Too many arguments for "foo" of "C"
+C.bozo()  # E: "C" has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodCalledOnInstance]
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None: pass
+C().foo()
+C().foo(1)  # E: Too many arguments for "foo" of "C"
+C.bozo()    # E: "C" has no attribute "bozo"
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodMayCallAbstractMethod]
+from abc import abstractmethod
+import typing
+class C:
+  @classmethod
+  def foo(cls) -> None:
+      cls().bar()
+  @abstractmethod
+  def bar(self) -> None:
+      pass
+[builtins fixtures/classmethod.pyi]
+
+
+-- Properties
+-- ----------
+
+
+[case testAccessingReadOnlyProperty]
+import typing
+class A:
+    @property
+    def f(self) -> str: pass
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/property.pyi]
+
+[case testAssigningToReadOnlyProperty]
+import typing
+class A:
+    @property
+    def f(self) -> str: pass
+A().f = '' # E: Property "f" defined in "A" is read-only
+[builtins fixtures/property.pyi]
+
+[case testPropertyGetterBody]
+import typing
+class A:
+    @property
+    def f(self) -> str:
+        self.x = 1
+        self.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/property.pyi]
+[out]
+
+[case testDynamicallyTypedProperty]
+import typing
+class A:
+    @property
+    def f(self): pass
+a = A()
+a.f.xx
+a.f = '' # E: Property "f" defined in "A" is read-only
+[builtins fixtures/property.pyi]
+
+[case testPropertyWithSetter]
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.setter
+    def f(self, x: int) -> None:
+        pass
+a = A()
+a.f = a.f
+a.f.x # E: "int" has no attribute "x"
+a.f = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/property.pyi]
+
+[case testPropertyWithDeleterButNoSetter]
+import typing
+class A:
+    @property
+    def f(self) -> int:
+        return 1
+    @f.deleter
+    def f(self, x) -> None:
+        pass
+a = A()
+a.f = a.f # E: Property "f" defined in "A" is read-only
+a.f.x # E: "int" has no attribute "x"
+[builtins fixtures/property.pyi]
+
+-- Descriptors
+-- -----------
+
+
+[case testAccessingNonDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class A:
+    f = D()
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+[case testSettingNonDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class A:
+    f = D()
+a = A()
+a.f = 'foo'
+a.f = D()  # E: Incompatible types in assignment (expression has type "D", variable has type "str")
+
+[case testSettingDataDescriptor]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, value: str) -> None: pass
+class A:
+    f = D()
+a = A()
+a.f = ''
+a.f = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
+
+[case testReadingDescriptorWithoutDunderGet]
+from typing import Union, Any
+class D:
+    def __set__(self, inst: Any, value: str) -> None: pass
+class A:
+    f = D()
+    def __init__(self): self.f = 's'
+a = A()
+reveal_type(a.f)  # E: Revealed type is '__main__.D'
+
+[case testAccessingDescriptorFromClass]
+# flags: --strict-optional
+from d import D, Base
+class A(Base):
+    f = D()
+reveal_type(A.f)  # E: Revealed type is 'd.D'
+reveal_type(A().f)  # E: Revealed type is 'builtins.str'
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+class Base: pass
+class D:
+    def __init__(self) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[Base]) -> D: pass
+    @overload
+    def __get__(self, inst: Base, own: Type[Base]) -> str: pass
+[builtins fixtures/bool.pyi]
+
+[case testAccessingDescriptorFromClassWrongBase]
+# flags: --strict-optional
+from d import D, Base
+class A:
+    f = D()
+reveal_type(A.f)
+reveal_type(A().f)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+class Base: pass
+class D:
+    def __init__(self) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[Base]) -> D: pass
+    @overload
+    def __get__(self, inst: Base, own: Type[Base]) -> str: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Revealed type is 'Any'
+main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
+main:6: error: Revealed type is 'Any'
+main:6: error: No overload variant of "__get__" of "D" matches argument types [__main__.A, Type[__main__.A]]
+
+
+[case testAccessingGenericNonDataDescriptor]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.int*'
+reveal_type(a.g)  # E: Revealed type is 'builtins.str*'
+
+[case testSettingGenericDataDescriptor]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+    def __set__(self, inst: Any, v: V) -> None: pass
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+a.f = 1
+a.f = '' # E: Argument 2 to "__set__" of "D" has incompatible type "str"; expected "int"
+a.g = ''
+a.g = 1 # E: Argument 2 to "__set__" of "D" has incompatible type "int"; expected "str"
+
+[case testAccessingGenericDescriptorFromClass]
+# flags: --strict-optional
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+reveal_type(A.f)  # E: Revealed type is 'd.D[__main__.A*, builtins.int*]'
+reveal_type(A.g)  # E: Revealed type is 'd.D[__main__.A*, builtins.str*]'
+reveal_type(A().f)  # E: Revealed type is 'builtins.int*'
+reveal_type(A().g)  # E: Revealed type is 'builtins.str*'
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+
+[case testAccessingGenericDescriptorFromInferredClass]
+# flags: --strict-optional
+from typing import Type
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+def f(some_class: Type[A]):
+    reveal_type(some_class.f)
+    reveal_type(some_class.g)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: Type[T]) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:8: error: Revealed type is 'd.D[__main__.A*, builtins.int*]'
+main:9: error: Revealed type is 'd.D[__main__.A*, builtins.str*]'
+
+[case testAccessingGenericDescriptorFromClassBadOverload]
+# flags: --strict-optional
+from d import D
+class A:
+    f = D(10)  # type: D[A, int]
+reveal_type(A.f)
+[file d.pyi]
+from typing import TypeVar, Type, Generic, overload
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: pass
+    @overload
+    def __get__(self, inst: None, own: None) -> 'D[T, V]': pass
+    @overload
+    def __get__(self, inst: T, own: Type[T]) -> V: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Revealed type is 'Any'
+main:5: error: No overload variant of "__get__" of "D" matches argument types [builtins.None, Type[__main__.A]]
+
+[case testAccessingNonDataDescriptorSubclass]
+from typing import Any
+class C:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+class D(C): pass
+class A:
+    f = D()
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+[case testSettingDataDescriptorSubclass]
+from typing import Any
+class C:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, v: str) -> None: pass
+class D(C): pass
+class A:
+    f = D()
+a = A()
+a.f = ''
+a.f = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
+
+[case testReadingDescriptorSubclassWithoutDunderGet]
+from typing import Union, Any
+class C:
+    def __set__(self, inst: Any, v: str) -> None: pass
+class D(C): pass
+class A:
+    f = D()
+    def __init__(self): self.f = 's'
+a = A()
+reveal_type(a.f)  # E: Revealed type is '__main__.D'
+
+[case testAccessingGenericNonDataDescriptorSubclass]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class C(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+class D(C[V], Generic[V]): pass
+class A:
+    f = D(10)
+    g = D('10')
+a = A()
+reveal_type(a.f)  # E: Revealed type is 'builtins.int*'
+reveal_type(a.g)  # E: Revealed type is 'builtins.str*'
+
+[case testSettingGenericDataDescriptorSubclass]
+from typing import TypeVar, Type, Generic
+T = TypeVar('T')
+V = TypeVar('V')
+class C(Generic[T, V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: T, own: Type[T]) -> V: return self.v
+    def __set__(self, inst: T, v: V) -> None: pass
+class D(C[T, V], Generic[T, V]): pass
+class A:
+    f = D(10)  # type: D[A, int]
+    g = D('10')  # type: D[A, str]
+a = A()
+a.f = 1
+a.f = '' # E: Argument 2 to "__set__" of "C" has incompatible type "str"; expected "int"
+a.g = ''
+a.g = 1 # E: Argument 2 to "__set__" of "C" has incompatible type "int"; expected "str"
+
+[case testSetDescriptorOnClass]
+from typing import TypeVar, Type, Generic
+T = TypeVar('T')
+V = TypeVar('V')
+class D(Generic[T, V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: T, own: Type[T]) -> V: return self.v
+    def __set__(self, inst: T, v: V) -> None: pass
+class A:
+    f = D(10)  # type: D[A, int]
+A.f = D(20)
+A.f = D('some string')  # E: Argument 1 to "D" has incompatible type "str"; expected "int"
+
+[case testSetDescriptorOnInferredClass]
+from typing import TypeVar, Type, Generic, Any
+V = TypeVar('V')
+class D(Generic[V]):
+    def __init__(self, v: V) -> None: self.v = v
+    def __get__(self, inst: Any, own: Type) -> V: return self.v
+    def __set__(self, inst: Any, v: V) -> None: pass
+class A:
+    f = D(10)
+def f(some_class: Type[A]):
+    A.f = D(20)
+    A.f = D('some string')
+[out]
+main:11: error: Argument 1 to "D" has incompatible type "str"; expected "int"
+
+[case testDescriptorUncallableDunderSet]
+class D:
+    __set__ = 's'
+class A:
+    f = D()
+A().f = 'x'  # E: __main__.D.__set__ is not callable
+
+[case testDescriptorDunderSetTooFewArgs]
+class D:
+    def __set__(self, inst): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too many arguments for "__set__"
+
+[case testDescriptorDunderSetTooManyArgs]
+class D:
+    def __set__(self, inst, v, other): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too few arguments for "__set__"
+
+[case testDescriptorDunderSetWrongArgTypes]
+class D:
+    def __set__(self, inst: str, v:str) -> None: pass
+class A:
+    f = D()
+A().f = 'x'  # E: Argument 1 to "__set__" of "D" has incompatible type "A"; expected "str"
+
+[case testDescriptorUncallableDunderGet]
+class D:
+    __get__ = 's'
+class A:
+    f = D()
+A().f  # E: __main__.D.__get__ is not callable
+
+[case testDescriptorDunderGetTooFewArgs]
+class D:
+    def __get__(self, inst): pass
+class A:
+    f = D()
+A().f  # E: Too many arguments for "__get__"
+
+[case testDescriptorDunderGetTooManyArgs]
+class D:
+    def __get__(self, inst, own, other): pass
+class A:
+    f = D()
+A().f = 'x'  # E: Too few arguments for "__get__"
+
+[case testDescriptorDunderGetWrongArgTypeForInstance]
+from typing import Any
+class D:
+    def __get__(self, inst: str, own: Any) -> Any: pass
+class A:
+    f = D()
+A().f  # E: Argument 1 to "__get__" of "D" has incompatible type "A"; expected "str"
+
+[case testDescriptorDunderGetWrongArgTypeForOwner]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: str) -> Any: pass
+class A:
+    f = D()
+A().f  # E: Argument 2 to "__get__" of "D" has incompatible type Type[A]; expected "str"
+
+[case testDescriptorGetSetDifferentTypes]
+from typing import Any
+class D:
+    def __get__(self, inst: Any, own: Any) -> str: return 's'
+    def __set__(self, inst: Any, v: int) -> None: pass
+class A:
+    f = D()
+a = A()
+a.f = 1
+reveal_type(a.f)  # E: Revealed type is 'builtins.str'
+
+
+-- _promote decorators
+-- -------------------
+
+
+[case testSimpleDucktypeDecorator]
+from typing import _promote
+class A: pass
+ at _promote(A)
+class B: pass
+a = None  # type: A
+b = None  # type: B
+b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = b
+
+[case testDucktypeTransitivityDecorator]
+from typing import _promote
+class A: pass
+ at _promote(A)
+class B: pass
+ at _promote(B)
+class C: pass
+a = None  # type: A
+c = None  # type: C
+c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+a = c
+
+
+-- Hard coded type promotions
+-- --------------------------
+
+[case testHardCodedTypePromotions]
+import typing
+def f(x: float) -> None: pass
+def g(x: complex) -> None: pass
+f(1)
+g(1)
+g(1.1)
+[builtins fixtures/complex.pyi]
+
+
+-- Operator methods
+-- ----------------
+
+
+[case testOperatorMethodOverrideIntroducingOverloading]
+from typing import overload
+class A:
+    def __add__(self, x: int) -> int: pass
+class B(A):
+    @overload  # E: Signature of "__add__" incompatible with supertype "A"
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> str: pass
+[out]
+
+[case testOperatorMethodOverrideWideningArgumentType]
+import typing
+class A:
+    def __add__(self, x: int) -> int: pass
+class B(A):
+    def __add__(self, x: object) -> int: pass
+[out]
+
+[case testOperatorMethodOverrideNarrowingReturnType]
+import typing
+class A:
+    def __add__(self, x: int) -> 'A': pass
+class B(A):
+    def __add__(self, x: int) -> 'B': pass
+
+[case testOperatorMethodOverrideWithDynamicallyTyped]
+import typing
+class A:
+    def __add__(self, x: int) -> 'A': pass
+class B(A):
+    def __add__(self, x): pass
+
+[case testOperatorMethodOverrideWithIdenticalOverloadedType]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+
+[case testOverloadedOperatorMethodOverrideWithDynamicallyTypedMethod]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    def __add__(self, x): pass
+class C(A):
+    def __add__(self, x: Any) -> A: pass
+
+[case testOverloadedOperatorMethodOverrideWithNewItem]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: int) -> 'A': pass
+    @overload
+    def __add__(self, x: str) -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: int) -> A: pass
+    @overload
+    def __add__(self, x: str) -> A: pass
+    @overload
+    def __add__(self, x: type) -> A: pass
+[out]
+main:8: error: Signature of "__add__" incompatible with supertype "A"
+
+[case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder]
+from typing import overload, Any
+class A:
+    @overload
+    def __add__(self, x: 'B') -> 'B': pass
+    @overload
+    def __add__(self, x: 'A') -> 'A': pass
+class B(A):
+    @overload
+    def __add__(self, x: 'A') -> 'A': pass
+    @overload
+    def __add__(self, x: 'B') -> 'B': pass
+[out]
+main:8: error: Signature of "__add__" incompatible with supertype "A"
+
+[case testReverseOperatorMethodArgumentType]
+from typing import Any
+class A: pass
+class B:
+    def __radd__(self, x: A) -> int: pass # Error
+class C:
+    def __radd__(self, x: A) -> Any: pass
+class D:
+    def __radd__(self, x: A) -> object: pass
+[out]
+
+[case testReverseOperatorMethodArgumentType2]
+from typing import Any, Tuple, Callable
+class A:
+    def __radd__(self, x: Tuple[int, str]) -> int: pass
+class B:
+    def __radd__(self, x: Callable[[], int]) -> int: pass
+class C:
+    def __radd__(self, x: Any) -> int: pass
+[out]
+
+[case testReverseOperatorMethodForwardIsAny]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    @deco
+    def __add__(self, other: C) -> C: return C()
+    def __radd__(self, other: C) -> C: return C()
+[out]
+
+[case testReverseOperatorMethodForwardIsAny2]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    __add__ = None  # type: Any
+    def __radd__(self, other: C) -> C: return C()
+[out]
+
+[case testReverseOperatorMethodForwardIsAny3]
+from typing import Any
+def deco(f: Any) -> Any: return f
+class C:
+    __add__ = 42
+    def __radd__(self, other: C) -> C: return C()
+[out]
+main:5: error: Forward operator "__add__" is not callable
+
+[case testOverloadedReverseOperatorMethodArgumentType]
+from typing import overload, Any
+class A:
+    @overload
+    def __radd__(self, x: 'A') -> str: pass # Error
+    @overload
+    def __radd__(self, x: 'A') -> Any: pass
+[out]
+
+[case testReverseOperatorMethodArgumentTypeAndOverloadedMethod]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+    def __radd__(self, x: 'A') -> str: pass
+
+[case testAbstractReverseOperatorMethod]
+import typing
+from abc import abstractmethod
+class A:
+    @abstractmethod
+    def __lt__(self, x: 'A') -> int: pass
+class B:
+    @abstractmethod
+    def __lt__(self, x: 'B') -> int: pass
+    @abstractmethod
+    def __gt__(self, x: 'B') -> int: pass
+[out]
+
+[case testOperatorMethodsAndOverloadingSpecialCase]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: 'A') -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+class B:
+    def __radd__(self, x: 'A') -> str: pass
+[out]
+
+[case testUnsafeOverlappingWithOperatorMethodsAndOverloading2]
+from typing import overload
+class A:
+    def __add__(self, x: 'A') -> int: pass
+class B:
+    @overload
+    def __radd__(self, x: 'X') -> str: pass # Error
+    @overload
+    def __radd__(self, x: A) -> str: pass   # Error
+class X:
+    def __add__(self, x): pass
+[out]
+main:6: error: Signatures of "__radd__" of "B" and "__add__" of "X" are unsafely overlapping
+
+[case testUnsafeOverlappingWithLineNo]
+from typing import TypeVar
+T = TypeVar('T', Real)
+class Real:
+    def __add__(self, other): ...
+class Fraction(Real):
+    def __radd__(self, other: T) -> T: ...
+[out]
+main:6: error: Signatures of "__radd__" of "Fraction" and "__add__" of "Real" are unsafely overlapping
+
+[case testOverlappingNormalAndInplaceOperatorMethod]
+import typing
+class A:
+    # Incompatible (potential trouble with __radd__)
+    def __add__(self, x: 'A') -> int: pass
+    def __iadd__(self, x: 'B') -> int: pass
+class B:
+    # Safe
+    def __add__(self, x: 'C') -> int: pass
+    def __iadd__(self, x: A) -> int: pass
+class C(A): pass
+[out]
+main:5: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testOverloadedNormalAndInplaceOperatorMethod]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> int: pass
+    @overload # Error
+    def __iadd__(self, x: int) -> int: pass
+    @overload
+    def __iadd__(self, x: object) -> int: pass
+class B:
+    @overload
+    def __add__(self, x: int) -> int: pass
+    @overload
+    def __add__(self, x: str) -> str: pass
+    @overload
+    def __iadd__(self, x: int) -> int: pass
+    @overload
+    def __iadd__(self, x: str) -> str: pass
+[out]
+main:7: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testIntroducingInplaceOperatorInSubclass]
+import typing
+class A:
+    def __add__(self, x: 'A') -> 'B': pass
+class B(A):
+    # __iadd__ effectively partially overrides __add__
+    def __iadd__(self, x: 'A') -> 'A': pass # Error
+class C(A):
+    def __iadd__(self, x: int) -> 'B': pass # Error
+class D(A):
+    def __iadd__(self, x: 'A') -> 'B': pass
+[out]
+main:6: error: Return type of "__iadd__" incompatible with "__add__" of supertype "A"
+main:8: error: Argument 1 of "__iadd__" incompatible with "__add__" of supertype "A"
+main:8: error: Signatures of "__iadd__" and "__add__" are incompatible
+
+[case testGetAttribute]
+
+a, b = None, None # type: A, B
+class A:
+    def __getattribute__(self, x: str) -> A:
+        return A()
+class B: pass
+
+a = a.foo
+b = a.bar
+[out]
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testGetAttributeSignature]
+class A:
+    def __getattribute__(self, x: str) -> A: pass
+class B:
+    def __getattribute__(self, x: A) -> B: pass
+class C:
+    def __getattribute__(self, x: str, y: str) -> C: pass
+class D:
+    def __getattribute__(self, x: str) -> None: pass
+[out]
+main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
+main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
+
+[case testGetAttr]
+
+a, b = None, None # type: A, B
+class A:
+    def __getattr__(self, x: str) -> A:
+        return A()
+class B: pass
+
+a = a.foo
+b = a.bar
+[out]
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+
+[case testGetAttrSignature]
+class A:
+    def __getattr__(self, x: str) -> A: pass
+class B:
+    def __getattr__(self, x: A) -> B: pass
+class C:
+    def __getattr__(self, x: str, y: str) -> C: pass
+class D:
+    def __getattr__(self, x: str) -> None: pass
+[out]
+main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B"
+main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C"
+
+
+-- CallableType objects
+-- ----------------
+
+
+[case testCallableObject]
+import typing
+a = A()
+b = B()
+
+a()  # E: Too few arguments for "__call__" of "A"
+a(a, a)  # E: Too many arguments for "__call__" of "A"
+a = a(a)
+a = a(b)  # E: Argument 1 to "__call__" of "A" has incompatible type "B"; expected "A"
+b = a(a)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+class A:
+    def __call__(self, x: A) -> A:
+        pass
+class B: pass
+
+
+-- __new__
+-- --------
+
+
+[case testConstructInstanceWith__new__]
+class C:
+    def __new__(cls, foo: int = None) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+
+x = C(foo=12)
+x.a # E: "C" has no attribute "a"
+C(foo='') # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testConstructInstanceWithDynamicallyTyped__new__]
+class C:
+    def __new__(cls, foo):
+        obj = object.__new__(cls)
+        return obj
+
+x = C(foo=12)
+x = C(foo='x')
+x.a # E: "C" has no attribute "a"
+C(bar='') # E: Unexpected keyword argument "bar" for "C"
+[builtins fixtures/__new__.pyi]
+
+[case testClassWith__new__AndCompatibilityWithType]
+class C:
+    def __new__(cls, foo: int = None) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+def f(x: type) -> None: pass
+def g(x: int) -> None: pass
+f(C)
+g(C) # E: Argument 1 to "g" has incompatible type "C"; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testClassWith__new__AndCompatibilityWithType2]
+class C:
+    def __new__(cls, foo):
+        obj = object.__new__(cls)
+        return obj
+def f(x: type) -> None: pass
+def g(x: int) -> None: pass
+f(C)
+g(C) # E: Argument 1 to "g" has incompatible type "C"; expected "int"
+[builtins fixtures/__new__.pyi]
+
+[case testGenericClassWith__new__]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C(Generic[T]):
+    def __new__(cls, foo: T) -> 'C[T]':
+        obj = object.__new__(cls)
+        return obj
+    def set(self, x: T) -> None: pass
+c = C('')
+c.set('')
+c.set(1) # E: Argument 1 to "set" of "C" has incompatible type "int"; expected "str"
+[builtins fixtures/__new__.pyi]
+
+[case testOverloaded__new__]
+from typing import overload
+class C:
+    @overload
+    def __new__(cls, foo: int) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+    @overload
+    def __new__(cls, x: str, y: str) -> 'C':
+        obj = object.__new__(cls)
+        return obj
+c = C(1)
+c.a # E: "C" has no attribute "a"
+C('', '')
+C('') # E: No overload variant of "C" matches argument types [builtins.str]
+[builtins fixtures/__new__.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testSubclassInt]
+import typing
+class A(int): pass
+n = 0
+n = A()
+a = A()
+a = 0 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
+
+[case testForwardReferenceToNestedClass]
+def f(o: 'B.C') -> None:
+    o.f('') # E: Argument 1 to "f" of "C" has incompatible type "str"; expected "int"
+
+class B:
+    class C:
+        def f(self, x: int) -> None: pass
+[out]
+
+[case testForwardReferenceToNestedClassDeep]
+def f(o: 'B.C.D') -> None:
+    o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
+
+class B:
+    class C:
+        class D:
+            def f(self, x: int) -> None: pass
+[out]
+
+[case testForwardReferenceToNestedClassWithinClass]
+class B:
+    def f(self, o: 'C.D') -> None:
+        o.f('') # E: Argument 1 to "f" of "D" has incompatible type "str"; expected "int"
+
+    class C:
+        class D:
+            def f(self, x: int) -> None: pass
+[out]
+
+[case testClassVsInstanceDisambiguation]
+class A: pass
+def f(x: A) -> None: pass
+f(A) # E: Argument 1 to "f" has incompatible type "A" (type object); expected "A"
+[out]
+
+-- TODO
+--   attribute inherited from superclass; assign in __init__
+--   refer to attribute before type has been inferred (the initialization in
+--   __init__ has not been analyzed)
+
+[case testAnyBaseClassUnconstrainedConstructor]
+from typing import Any
+B = None  # type: Any
+class C(B): pass
+C(0)
+C(arg=0)
+[out]
+
+[case testErrorMapToSupertype]
+import typing
+class X(Nope): pass  # E: Name 'Nope' is not defined
+a, b = X()  # Used to crash here (#2244)
+
+
+-- Class-valued attributes
+-- -----------------------
+
+[case testClassValuedAttributesBasics]
+class A: ...
+class B:
+    a = A
+    bad = lambda: 42
+
+B().bad() # E: Invalid method type
+reveal_type(B.a) # E: Revealed type is 'def () -> __main__.A'
+reveal_type(B().a) # E: Revealed type is 'def () -> __main__.A'
+reveal_type(B().a()) # E: Revealed type is '__main__.A'
+
+class C:
+    a = A
+    def __init__(self) -> None:
+        self.aa = self.a()
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A'
+[out]
+
+[case testClassValuedAttributesGeneric]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+
+class A(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+class B(Generic[T]):
+    a = A[T]
+
+reveal_type(B[int]().a) # E: Revealed type is 'def (x: builtins.int*) -> __main__.A[builtins.int*]'
+B[int]().a('hi') # E: Argument 1 has incompatible type "str"; expected "int"
+
+class C(Generic[T]):
+    a = A
+    def __init__(self) -> None:
+        self.aa = self.a(42)
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int]'
+[out]
+
+[case testClassValuedAttributesAlias]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T, S]): ...
+
+SameA = A[T, T]
+
+class B:
+    a_any = SameA
+    a_int = SameA[int]
+
+reveal_type(B().a_any) # E: Revealed type is 'def () -> __main__.A[Any, Any]'
+reveal_type(B().a_int()) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+
+class C:
+    a_int = SameA[int]
+    def __init__(self) -> None:
+        self.aa = self.a_int()
+
+reveal_type(C().aa) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+[out]
+
+
+-- Type[C]
+-- -------
+
+
+[case testTypeUsingTypeCBasic]
+from typing import Type
+class User: pass
+class ProUser(User): pass
+def new_user(user_class: Type[User]) -> User:
+    return user_class()
+reveal_type(new_user(User))  # E: Revealed type is '__main__.User'
+reveal_type(new_user(ProUser))  # E: Revealed type is '__main__.User'
+[out]
+
+[case testTypeUsingTypeCDefaultInit]
+from typing import Type
+class B:
+    pass
+def f(A: Type[B]) -> None:
+    A(0)  # E: Too many arguments for "B"
+    A()
+[out]
+
+[case testTypeUsingTypeCInitWithArg]
+from typing import Type
+class B:
+    def __init__(self, a: int) -> None: pass
+def f(A: Type[B]) -> None:
+    A(0)
+    A()  # E: Too few arguments for "B"
+[out]
+
+[case testTypeUsingTypeCTypeVar]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+U = TypeVar('U', bound=User)
+def new_user(user_class: Type[U]) -> U:
+    user = user_class()
+    reveal_type(user)
+    return user
+pro_user = new_user(ProUser)
+reveal_type(pro_user)
+[out]
+main:7: error: Revealed type is 'U`-1'
+main:10: error: Revealed type is '__main__.ProUser*'
+
+[case testTypeUsingTypeCTypeVarDefaultInit]
+from typing import Type, TypeVar
+class B:
+    pass
+T = TypeVar('T', bound=B)
+def f(A: Type[T]) -> None:
+    A()
+    A(0)  # E: Too many arguments for "B"
+[out]
+
+[case testTypeUsingTypeCTypeVarWithInit]
+from typing import Type, TypeVar
+class B:
+    def __init__(self, a: int) -> None: pass
+T = TypeVar('T', bound=B)
+def f(A: Type[T]) -> None:
+    A()  # E: Too few arguments for "B"
+    A(0)
+[out]
+
+[case testTypeUsingTypeCTwoTypeVars]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+class WizUser(ProUser): pass
+U = TypeVar('U', bound=User)
+def new_user(u_c: Type[U]) -> U: pass
+P = TypeVar('P', bound=ProUser)
+def new_pro(pro_c: Type[P]) -> P:
+    return new_user(pro_c)
+wiz = new_pro(WizUser)
+reveal_type(wiz)
+def error(u_c: Type[U]) -> P:
+    return new_pro(u_c)  # Error here, see below
+[out]
+main:11: error: Revealed type is '__main__.WizUser*'
+main:13: error: Incompatible return value type (got "U", expected "P")
+main:13: error: Type argument 1 of "new_pro" has incompatible value "U"
+
+[case testTypeUsingTypeCCovariance]
+from typing import Type, TypeVar
+class User: pass
+class ProUser(User): pass
+def new_user(user_class: Type[User]) -> User:
+    return user_class()
+def new_pro_user(user_class: Type[ProUser]):
+    new_user(user_class)
+[out]
+
+[case testTypeUsingTypeCErrorCovariance]
+from typing import Type, TypeVar
+class User: pass
+def new_user(user_class: Type[User]):
+    return user_class()
+def foo(arg: Type[int]):
+    new_user(arg)  # E: Argument 1 to "new_user" has incompatible type Type[int]; expected Type[User]
+[out]
+
+[case testTypeUsingTypeCUnionOverload]
+from typing import Type, Union, overload
+class X:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, a: int) -> None: pass
+class Y:
+    def __init__(self) -> None: pass
+def bar(o: Type[Union[X, Y]]): pass
+bar(X)
+bar(Y)
+[out]
+
+[case testTypeUsingTypeCTypeAny]
+from typing import Type, Any
+def foo(arg: Type[Any]):
+    x = arg()
+    x = arg(0)
+    x = arg('', ())
+    reveal_type(x)  # E: Revealed type is 'Any'
+    x.foo
+class X: pass
+foo(X)
+[out]
+
+[case testTypeUsingTypeCTypeNoArg]
+from typing import Type
+def foo(arg: Type):
+    x = arg()
+    reveal_type(x)  # E: Revealed type is 'Any'
+class X: pass
+foo(X)
+[out]
+
+[case testTypeUsingTypeCBuiltinType]
+from typing import Type
+def foo(arg: type): pass
+class X: pass
+def bar(arg: Type[X]):
+    foo(arg)
+foo(X)
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethod]
+from typing import Type
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+def process(cls: Type[User]):
+    reveal_type(cls.foo())  # E: Revealed type is 'builtins.int'
+    obj = cls()
+    reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[User] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodUnion]
+# Ideally this would work, but not worth the effort; just don't crash
+from typing import Type, Union
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+class ProUser(User): pass
+class BasicUser(User): pass
+def process(cls: Type[Union[BasicUser, ProUser]]):
+    cls.foo()  # E: Type[Union[BasicUser, ProUser]] has no attribute "foo"
+    obj = cls()
+    cls.bar(obj)  # E: Type[Union[BasicUser, ProUser]] has no attribute "bar"
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[Union[BasicUser, ProUser]] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodFromTypeVar]
+from typing import Type, TypeVar
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+U = TypeVar('U', bound=User)
+def process(cls: Type[U]):
+    reveal_type(cls.foo())  # E: Revealed type is 'builtins.int'
+    obj = cls()
+    reveal_type(cls.bar(obj))  # E: Revealed type is 'builtins.int'
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[U] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCClassMethodFromTypeVarUnionBound]
+# Ideally this would work, but not worth the effort; just don't crash
+from typing import Type, TypeVar, Union
+class User:
+    @classmethod
+    def foo(cls) -> int: pass
+    def bar(self) -> int: pass
+class ProUser(User): pass
+class BasicUser(User): pass
+U = TypeVar('U', bound=Union[ProUser, BasicUser])
+def process(cls: Type[U]):
+    cls.foo()  # E: Type[U] has no attribute "foo"
+    obj = cls()
+    cls.bar(obj)  # E: Type[U] has no attribute "bar"
+    cls.mro()  # Defined in class type
+    cls.error  # E: Type[U] has no attribute "error"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeUsingTypeCErrorUnsupportedType]
+from typing import Type, Tuple
+def foo(arg: Type[Tuple[int]]):  # E: Unsupported type Type["Tuple[int]"]
+    arg()
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTypeUsingTypeCOverloadedClass]
+from typing import Type, TypeVar, overload
+class User:
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, arg: int) -> None: pass
+    @classmethod
+    def foo(cls) -> None: pass
+U = TypeVar('U', bound=User)
+def new(uc: Type[U]) -> U:
+    uc.foo()
+    u = uc()
+    u.foo()
+    u = uc(0)
+    u.foo()
+    u = uc('')
+    u.foo(0)
+    return uc()
+u = new(User)
+[builtins fixtures/classmethod.pyi]
+[out]
+main:16: error: No overload variant of "User" matches argument types [builtins.str]
+main:17: error: Too many arguments for "foo" of "User"
+
+[case testTypeUsingTypeCInUpperBound]
+from typing import TypeVar, Type
+class B: pass
+T = TypeVar('T', bound=Type[B])
+def f(a: T): pass
+[out]
+
+[case testTypeUsingTypeCTuple]
+from typing import Type, Tuple
+def f(a: Type[Tuple[int, int]]):
+    a()
+[out]
+main:2: error: Unsupported type Type["Tuple[int, int]"]
+
+[case testTypeUsingTypeCNamedTuple]
+from typing import Type, NamedTuple
+N = NamedTuple('N', [('x', int), ('y', int)])
+def f(a: Type[N]):
+    a()
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Unsupported type Type["N"]
+
+[case testTypeUsingTypeCJoin]
+from typing import Type
+class B: pass
+class C(B): pass
+class D(B): pass
+def foo(c: Type[C], d: Type[D]) -> None:
+    x = [c, d]
+    reveal_type(x)
+
+[builtins fixtures/list.pyi]
+[out]
+main:7: error: Revealed type is 'builtins.list[Type[__main__.B]]'
+
+[case testTypeMatchesOverloadedFunctions]
+from typing import Type, overload, Union
+
+class User: pass
+UserType = User  # type: Type[User]
+
+ at overload
+def f(a: object) -> int: pass
+ at overload
+def f(a: int) -> str: pass
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeMatchesGeneralTypeInOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+UserType = User  # type: Type[User]
+
+ at overload
+def f(a: type) -> int:
+    return 1
+ at overload
+def f(a: int) -> str:
+    return "a"
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+reveal_type(f(1))  # E: Revealed type is 'builtins.str'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeMatchesSpecificTypeInOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+UserType = User  # type: Type[User]
+
+ at overload
+def f(a: User) -> User:
+    return User()
+ at overload
+def f(a: Type[User]) -> int:
+    return 1
+ at overload
+def f(a: int) -> str:
+    return "a"
+
+reveal_type(f(User))  # E: Revealed type is 'builtins.int'
+reveal_type(f(UserType))  # E: Revealed type is 'builtins.int'
+reveal_type(f(User()))  # E: Revealed type is '__main__.User'
+reveal_type(f(1))  # E: Revealed type is 'builtins.str'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testMixingTypeTypeInOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: User) -> Type[User]:
+    return User
+ at overload
+def f(a: Type[User]) -> User:
+    return a()
+ at overload
+def f(a: int) -> Type[User]:
+    return User
+ at overload
+def f(a: str) -> User:
+    return User()
+
+reveal_type(f(User()))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(f(User))  # E: Revealed type is '__main__.User'
+reveal_type(f(3))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(f("hi"))  # E: Revealed type is '__main__.User'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testGeneralTypeDoesNotMatchSpecificTypeInOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: Type[User]) -> None: pass
+ at overload
+def f(a: int) -> None: pass
+
+def mock() -> type: return User
+
+f(User)
+f(mock())  # E: No overload variant of "f" matches argument types [builtins.type]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testNonTypeDoesNotMatchOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: Type[User]) -> None: pass
+ at overload
+def f(a: type) -> None: pass
+
+f(3)  # E: No overload variant of "f" matches argument types [builtins.int]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testInstancesDoNotMatchTypeInOverloadedFunctions]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: Type[User]) -> None: pass
+ at overload
+def f(a: int) -> None: pass
+
+f(User)
+f(User())  # E: No overload variant of "f" matches argument types [__main__.User]
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeCovarianceWithOverloadedFunctions]
+from typing import Type, overload
+
+class A: pass
+class B(A): pass
+class C(B): pass
+AType = A  # type: Type[A]
+BType = B  # type: Type[B]
+CType = C  # type: Type[C]
+
+ at overload
+def f(a: Type[B]) -> None: pass
+ at overload
+def f(a: int) -> None: pass
+
+f(A)  # E: No overload variant of "f" matches argument types [def () -> __main__.A]
+f(B)
+f(C)
+f(AType)  # E: No overload variant of "f" matches argument types [Type[__main__.A]]
+f(BType)
+f(CType)
+[builtins fixtures/classmethod.pyi]
+[out]
+
+
+[case testOverloadedCovariantTypesFail]
+from typing import Type, overload
+
+class A: pass
+class B(A): pass
+
+ at overload
+def f(a: Type[A]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def f(a: Type[B]) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testDistinctOverloadedCovariantTypesSucceed]
+from typing import Type, overload
+
+class A: pass
+class AChild(A): pass
+class B: pass
+class BChild(B): pass
+
+ at overload
+def f(a: Type[A]) -> int: pass
+ at overload
+def f(a: Type[B]) -> str: pass
+ at overload
+def f(a: A) -> A: pass
+ at overload
+def f(a: B) -> B: pass
+
+reveal_type(f(A))  # E: Revealed type is 'builtins.int'
+reveal_type(f(AChild))  # E: Revealed type is 'builtins.int'
+reveal_type(f(B))  # E: Revealed type is 'builtins.str'
+reveal_type(f(BChild))  # E: Revealed type is 'builtins.str'
+
+reveal_type(f(A()))  # E: Revealed type is '__main__.A'
+reveal_type(f(AChild()))  # E: Revealed type is '__main__.A'
+reveal_type(f(B()))  # E: Revealed type is '__main__.B'
+reveal_type(f(BChild()))  # E: Revealed type is '__main__.B'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeTypeOverlapsWithObjectAndType]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: Type[User]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def f(a: object) -> str: pass
+
+ at overload
+def g(a: Type[User]) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def g(a: type) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeOverlapsWithObject]
+from typing import Type, overload
+
+class User: pass
+
+ at overload
+def f(a: type) -> int: pass  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def f(a: object) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeConstructorReturnsTypeType]
+class User:
+    @classmethod
+    def test_class_method(cls) -> int: pass
+    @staticmethod
+    def test_static_method() -> str: pass
+    def test_instance_method(self) -> None: pass
+
+u = User()
+
+reveal_type(type(u))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(type(u).test_class_method())  # E: Revealed type is 'builtins.int'
+reveal_type(type(u).test_static_method())  # E: Revealed type is 'builtins.str'
+type(u).test_instance_method()  # E: Too few arguments for "test_instance_method" of "User"
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testObfuscatedTypeConstructorReturnsTypeType]
+from typing import TypeVar
+class User: pass
+
+f1 = type
+
+A = TypeVar('A')
+def f2(func: A) -> A:
+    return func
+
+u = User()
+
+reveal_type(f1(u))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(f2(type)(u))  # E: Revealed type is 'Type[__main__.User]'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testTypeConstructorLookalikeFails]
+class User: pass
+
+def fake1(a: object) -> type:
+    return User
+def fake2(a: int) -> type:
+    return User
+
+reveal_type(type(User()))  # E: Revealed type is 'Type[__main__.User]'
+reveal_type(fake1(User()))  # E: Revealed type is 'builtins.type'
+reveal_type(fake2(3))  # E: Revealed type is 'builtins.type'
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testOtherTypeConstructorsSucceed]
+def foo(self) -> int: return self.attr
+
+User = type('User', (object,), {'foo': foo, 'attr': 3})
+reveal_type(User)  # E: Revealed type is 'builtins.type'
+[builtins fixtures/args.pyi]
+[out]
+
+[case testTypeTypeComparisonWorks]
+class User: pass
+
+User == User
+User == type(User())
+type(User()) == User
+type(User()) == type(User())
+
+User != User
+User != type(User())
+type(User()) != User
+type(User()) != type(User())
+
+int == int
+int == type(3)
+type(3) == int
+type(3) == type(3)
+
+int != int
+int != type(3)
+type(3) != int
+type(3) != type(3)
+
+User is User
+User is type(User)
+type(User) is User
+type(User) is type(User)
+
+int is int
+int is type(3)
+type(3) is int
+type(3) is type(3)
+
+int.__eq__(int)
+int.__eq__(3, 4)
+[builtins fixtures/args.pyi]
+[out]
+main:33: error: Too few arguments for "__eq__" of "int"
+main:33: error: Unsupported operand types for == ("int" and "int")
+
+[case testMroSetAfterError]
+class C(str, str):
+    foo = 0
+    bar = foo
+[out]
+main:1: error: Duplicate base class "str"
+
+[case testCannotDetermineMro]
+class A: pass
+class B(A): pass
+class C(B): pass
+class D(A, B): pass # E: Cannot determine consistent method resolution order (MRO) for "D"
+class E(C, D): pass # E: Cannot determine consistent method resolution order (MRO) for "E"
+
+[case testInconsistentMroLocalRef]
+class A: pass
+class B(object, A): # E: Cannot determine consistent method resolution order (MRO) for "B"
+    def readlines(self): pass
+    __iter__ = readlines
+
+[case testDynamicMetaclass]
+# flags: --fast-parser
+class C(metaclass=int()):  # E: Dynamic metaclass not supported for 'C'
+    pass
+
+[case testVariableSubclass]
+class A:
+    a = 1  # type: int
+class B(A):
+    a = 1
+[out]
+
+[case testVariableSubclassAssignMismatch]
+class A:
+    a = 1  # type: int
+class B(A):
+    a = "a"
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSubclassAssignment]
+class A:
+    a = None  # type: int
+class B(A):
+    def __init__(self) -> None:
+        self.a = "a"
+[out]
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testVariableSubclassTypeOverwrite]
+class A:
+    a = None  # type: int
+class B(A):
+    a = None  # type: str
+class C(B):
+    a = "a"
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSubclassTypeOverwriteImplicit]
+class A:
+    a = 1
+class B(A):
+    a = None  # type: str
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableSuperUsage]
+class A:
+    a = []  # type: list
+class B(A):
+    a = [1, 2]
+class C(B):
+    a = B.a + [3]
+[builtins fixtures/list.pyi]
+[out]
+
+[case testClassAllBases]
+from typing import Union
+class A:
+    a = None  # type: Union[int, str]
+class B(A):
+    a = 1
+class C(B):
+    a = "str"
+class D(A):
+    a = "str"
+[out]
+main:7: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
+
+[case testVariableTypeVar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    a = 1
+
+[case testVariableTypeVarInvalid]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    a = "abc"
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableTypeVarIndirectly]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: T
+class B(A[int]):
+    pass
+class C(B):
+    a = "a"
+[out]
+main:8: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testVariableTypeVarList]
+from typing import List, TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None  # type: List[T]
+    b = None  # type: List[T]
+class B(A[int]):
+    a = [1]
+    b = ['']
+[builtins fixtures/list.pyi]
+[out]
+main:8: error: List item 0 has incompatible type "str"
+
+[case testVariableMethod]
+class A:
+    def a(self) -> None: pass
+    b = 1
+class B(A):
+    a = 1
+    def b(self) -> None: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as Callable[[A], None])
+main:6: error: Signature of "b" incompatible with supertype "A"
+
+[case testVariableProperty]
+class A:
+    @property
+    def a(self) -> bool: pass
+class B(A):
+    a = None  # type: bool
+class C(A):
+    a = True
+class D(A):
+    a = 1
+[builtins fixtures/property.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "bool")
+
+[case testVariableOverwriteAny]
+from typing import Any
+class A:
+    a = 1
+class B(A):
+    a = 'x'  # type: Any
+[out]
+
+[case testInstanceMethodOverwrite]
+class B():
+    def n(self, a: int) -> None: pass
+class C(B):
+    def m(self, a: int) -> None: pass
+    n = m
+[out]
+
+[case testInstanceMethodOverwriteError]
+class B():
+    def n(self, a: int) -> None: pass
+class C(B):
+    def m(self, a: str) -> None: pass
+    n = m
+[out]
+main:5: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+
+[case testInstanceMethodOverwriteTypevar]
+from typing import Generic, TypeVar
+T = TypeVar("T")
+class B(Generic[T]):
+    def n(self, a: T) -> None: pass
+class C(B[int]):
+    def m(self, a: int) -> None: pass
+    n = m
+
+[case testInstanceMethodOverwriteTwice]
+class I:
+    def foo(self) -> None: pass
+class A(I):
+    def foo(self) -> None: pass
+class B(A):
+    def bar(self) -> None: pass
+    foo = bar
+class C(B):
+    def bar(self) -> None: pass
+    foo = bar
+
+[case testClassMethodOverwrite]
+class B():
+    @classmethod
+    def n(self, a: int) -> None: pass
+class C(B):
+    @classmethod
+    def m(self, a: int) -> None: pass
+    n = m
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testClassMethodOverwriteError]
+class B():
+    @classmethod
+    def n(self, a: int) -> None: pass
+class C(B):
+    @classmethod
+    def m(self, a: str) -> None: pass
+    n = m
+[builtins fixtures/classmethod.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "B" defined the type as Callable[[int], None])
+
+[case testClassSpec]
+from typing import Callable
+class A():
+    b = None  # type: Callable[[A, int], int]
+class B(A):
+    def c(self, a: int) -> int: pass
+    b = c
+
+[case testClassSpecError]
+from typing import Callable
+class A():
+    b = None  # type: Callable[[A, int], int]
+class B(A):
+    def c(self, a: str) -> int: pass
+    b = c
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[str], int], base class "A" defined the type as Callable[[int], int])
+
+[case testClassStaticMethod]
+class A():
+    @staticmethod
+    def a(a: int) -> None: pass
+class B(A):
+    @staticmethod
+    def b(a: str) -> None: pass
+    a = b
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+
+[case testClassStaticMethodIndirect]
+class A():
+    @staticmethod
+    def a(a: int) -> None: pass
+    c = a
+class B(A):
+    @staticmethod
+    def b(a: str) -> None: pass
+    c = b
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:8: error: Incompatible types in assignment (expression has type Callable[[str], None], base class "A" defined the type as Callable[[int], None])
+
+[case testTempNode]
+class A():
+    def a(self) -> None: pass
+class B(A):
+    def b(self) -> None: pass
+    a = c = b
+
+[case testListObject]
+from typing import List
+class A:
+    x = []  # type: List[object]
+class B(A):
+    x = [1]
+[builtins fixtures/list.pyi]
+
+[case testClassMemberObject]
+class A:
+    x = object()
+class B(A):
+    x = 1
+class C(B):
+    x = ''
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", base class "B" defined the type as "int")
+
+[case testSlots]
+class A:
+    __slots__ = ("a")
+class B(A):
+    __slots__ = ("a", "b")
+
+[case testClassOrderOfError]
+class A:
+    x = 1
+class B(A):
+    x = "a"
+class C(B):
+    x = object()
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str")
+
+[case testClassOneErrorPerLine]
+class A:
+  x = 1
+class B(A):
+  x = ""
+  x = 1.0
+[out]
+main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int")
+
+[case testClassIgnoreType]
+class A:
+    x = 0
+class B(A):
+    x = ''  # type: ignore
+class C(B):
+    x = ''
+[out]
diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test
new file mode 100644
index 0000000..7e21ca0
--- /dev/null
+++ b/test-data/unit/check-columns.test
@@ -0,0 +1,68 @@
+[case testColumnsSyntaxError]
+# flags: --show-column-numbers
+1 +
+[out]
+main:2:4: error: invalid syntax
+
+
+[case testColumnsNestedFunctions]
+# flags: --show-column-numbers
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+    return B() # fail
+class A: pass
+class B: pass
+[out]
+main:5:8: error: Incompatible return value type (got "A", expected "B")
+main:6:4: error: Incompatible return value type (got "B", expected "A")
+
+[case testColumnsNestedFunctionsWithFastParse]
+# flags: --show-column-numbers --fast-parser
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+    return B() # fail
+class A: pass
+class B: pass
+[out]
+main:5:8: error: Incompatible return value type (got "A", expected "B")
+main:6:4: error: Incompatible return value type (got "B", expected "A")
+
+
+[case testColumnsMethodDefaultArgumentsAndSignatureAsComment]
+# flags: --show-column-numbers
+import typing
+class A:
+    def f(self, x = 1, y = 'hello'): # type: (int, str) -> str
+        pass
+A().f()
+A().f(1)
+A().f('') # E:0: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f(1, 1) # E:0: Argument 2 to "f" of "A" has incompatible type "int"; expected "str"
+A().f(1, 'hello', 'hi') # E:0: Too many arguments for "f" of "A"
+
+[case testColumnsMultipleStatementsPerLine]
+# flags: --show-column-numbers
+x = 1
+y = 'hello'
+x = 2; y = x; y += 1
+[out]
+main:4:7: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:4:14: error: Unsupported operand types for + ("str" and "int")
+
+[case testColumnsSimpleIsinstance]
+# flags: --show-column-numbers
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E:8: Incompatible types in assignment (expression has type "int", variable has type "str")
+    n = x # E:4: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+
diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test
new file mode 100644
index 0000000..27f8bee
--- /dev/null
+++ b/test-data/unit/check-dynamic-typing.test
@@ -0,0 +1,676 @@
+-- Assignment
+-- ----------
+
+
+[case testAssignmentWithDynamic]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d # Everything ok
+d = a
+d = d
+d.x = a
+d.x = d
+
+class A: pass
+
+[case testMultipleAssignmentWithDynamic]
+from typing import Any
+d = None # type: Any
+a, b = None, None # type: (A, B)
+
+d, a = b, b    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+d, d = d, d, d # E: Too many values to unpack (2 expected, 3 provided)
+
+a, b = d, d
+d, d = a, b
+a, b = d
+s, t = d
+
+class A: pass
+class B: pass
+
+
+-- Expressions
+-- -----------
+
+
+[case testCallingFunctionWithDynamicArgumentTypes]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = f(a)
+a = f(b)
+a = f(None)
+a = f(f)
+
+def f(x: Any) -> 'A':
+    pass
+
+class A: pass
+class B: pass
+
+[case testCallingWithDynamicReturnType]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+a = f(a)
+b = f(a)
+
+def f(x: 'A') -> Any:
+    pass
+
+class A: pass
+class B: pass
+
+[case testBinaryOperationsWithDynamicLeftOperand]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+c = None # type: C
+b = None # type: bool
+n = 0
+
+d in a  # E: Unsupported right operand type for in ("A")
+d and a
+d or a
+c = d and b # Unintuitive type inference?
+c = d or b  # Unintuitive type inference?
+
+c = d + a
+c = d - a
+c = d * a
+c = d / a
+c = d // a
+c = d % a
+c = d ** a
+b = d == a
+b = d != a
+b = d < a
+b = d <= a
+b = d > a
+b = d >= a
+b = d in c
+b = d and b
+b = d or b
+
+class A: pass
+class C:
+    def __contains__(self, a: A) -> bool:
+        pass
+[file builtins.py]
+class object:
+  def __init__(self): pass
+class bool: pass
+class int: pass
+class type: pass
+class function: pass
+class str: pass
+
+[case testBinaryOperationsWithDynamicAsRightOperand]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+c = None # type: C
+b = None # type: bool
+n = 0
+
+a and d
+a or d
+c = a in d
+c = b and d # Unintuitive type inference?
+c = b or d  # Unintuitive type inference?
+b = a + d
+b = a / d
+
+c = a + d
+c = a - d
+c = a * d
+c = a / d
+c = a // d
+c = a % d
+c = a ** d
+b = a in d
+b = b and d
+b = b or d
+
+class A:
+    def __add__(self, a: 'A') -> 'C':
+        pass
+    def __sub__(self, a: 'A') -> 'C':
+        pass
+    def __mul__(self, a: 'A') -> 'C':
+        pass
+    def __truediv__(self, a: 'A') -> 'C':
+        pass
+    def __floordiv__(self, a: 'A') -> 'C':
+        pass
+    def __mod__(self, a: 'A') -> 'C':
+        pass
+    def __pow__(self, a: 'A') -> 'C':
+        pass
+    def _lt(self, a: 'A') -> bool:
+        pass
+    def _gt(self, a: 'A') -> bool:
+        pass
+
+class C: pass
+[file builtins.py]
+class object:
+  def __init__(self): pass
+class bool: pass
+class int: pass
+class type: pass
+class function: pass
+class str: pass
+
+[case testDynamicWithUnaryExpressions]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+b = None # type: bool
+a = not d # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+b = not d
+a = -d
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testDynamicWithMemberAccess]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d.foo(a()) # E: "A" not callable
+
+a = d.x
+a = d.foo(a, a)
+d.x = a
+d.x.y.z  # E: "A" has no attribute "y"
+
+class A: pass
+[out]
+
+[case testIndexingWithDynamic]
+from typing import Any
+d = None # type: Any
+a = None # type: A
+
+a = d[a()] # E: "A" not callable
+d[a()] = a # E: "A" not callable
+
+a = d[a]
+d[a] = a
+d[a], d[a] = a, a
+
+class A: pass
+
+[case testTupleExpressionsWithDynamci]
+from typing import Tuple, Any
+t2 = None # type: Tuple[A, A]
+d = None # type: Any
+
+t2 = (d, d, d)  # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[A, A]")
+t2 = (d, d)
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testCastsWithDynamicType]
+from typing import Any, cast
+class A: pass
+class B: pass
+d = None # type: Any
+a = None # type: A
+b = None # type: B
+b = cast(A, d) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = cast(A, d)
+b = cast(Any, d)
+a = cast(Any, f())
+def f() -> None: pass
+
+[case testCompatibilityOfDynamicWithOtherTypes]
+from typing import Any, Tuple
+d = None # type: Any
+t = None # type: Tuple[A, A]
+# TODO: callable types, overloaded functions
+
+d = None # All ok
+d = t
+d = g
+d = A
+t = d
+f = d
+
+def g(a: 'A') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Statements
+-- ----------
+
+
+[case testDynamicCondition]
+from typing import Any
+d = None # type: Any
+while d:
+    pass
+if d:
+    pass
+elif d:
+    pass
+[builtins fixtures/bool.pyi]
+
+[case testRaiseWithDynamic]
+from typing import Any
+d = None # type: Any
+raise d
+[builtins fixtures/exception.pyi]
+
+[case testReturnWithDynamic]
+from typing import Any
+d = None # type: Any
+
+def f() -> None:
+    return d # Ok
+
+def g() -> 'A':
+    return d # Ok
+
+class A: pass
+
+
+-- Implicit dynamic types for functions
+-- ------------------------------------
+
+
+[case testImplicitGlobalFunctionSignature]
+from typing import Any, Callable
+x = None # type: Any
+a = None # type: A
+g = None # type: Callable[[], None]
+h = None # type: Callable[[A], None]
+
+f()     # E: Too few arguments for "f"
+f(x, x) # E: Too many arguments for "f"
+g = f   # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+f(a)
+f(x)
+a = f(a)
+h = f
+
+def f(x): pass
+
+class A: pass
+
+[case testImplicitGlobalFunctionSignatureWithDifferentArgCounts]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+a = None # type: A
+
+g1 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A], None])
+g2 = f0 # E: Incompatible types in assignment (expression has type Callable[[], Any], variable has type Callable[[A, A], None])
+g0 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[], None])
+g1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[A], None])
+
+g0 = g0
+g2 = f2
+f0()
+f2(a, a)
+
+def f0(): pass
+
+def f2(x, y): pass
+
+class A: pass
+
+[case testImplicitGlobalFunctionSignatureWithDefaultArgs]
+from typing import Callable
+a, b = None, None # type: (A, B)
+
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+g3 = None # type: Callable[[A, A, A], None]
+g4 = None # type: Callable[[A, A, A, A], None]
+
+f01(a, a)       # Fail
+f13()           # Fail
+f13(a, a, a, a) # Fail
+g2 = f01 # Fail
+g0 = f13 # Fail
+g4 = f13 # Fail
+
+f01()
+f01(a)
+f13(a)
+f13(a, a)
+f13(a, a, a)
+
+g0 = f01
+g1 = f01
+g1 = f13
+g2 = f13
+g3 = f13
+
+def f01(x = b): pass
+def f13(x, y = b, z = b): pass
+
+class A: pass
+class B: pass
+[out]
+main:10: error: Too many arguments for "f01"
+main:11: error: Too few arguments for "f13"
+main:12: error: Too many arguments for "f13"
+main:13: error: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
+main:14: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[], None])
+main:15: error: Incompatible types in assignment (expression has type Callable[[Any, Any, Any], Any], variable has type Callable[[A, A, A, A], None])
+
+[case testSkipTypeCheckingWithImplicitSignature]
+
+a = None # type: A
+def f():
+    a()
+def g(x):
+    a()
+    a.x
+    a + a
+    if a():
+        a()
+class A: pass
+[builtins fixtures/bool.pyi]
+
+[case testSkipTypeCheckingWithImplicitSignatureAndDefaultArgs]
+
+a = None # type: A
+def f(x=a()):
+    a()
+def g(x, y=a, z=a()):
+    a()
+class A: pass
+
+[case testImplicitMethodSignature]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+g2 = None # type: Callable[[A, A], None]
+a = None # type: A
+
+g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+g2 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[A, A], None])
+a = a.f  # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type "A")
+
+class A:
+    def g(self) -> None:
+        a = self.f(a)
+    def f(self, x): pass
+
+g1 = a.f
+a = a.f(a)
+
+[case testSkipTypeCheckingImplicitMethod]
+
+a = None # type: A
+class A:
+    def f(self):
+        a()
+    def g(self, x, y=a()):
+        a()
+
+[case testImplicitInheritedMethod]
+from typing import Callable
+g0 = None # type: Callable[[], None]
+g1 = None # type: Callable[[A], None]
+a = None # type: A
+
+g0 = a.f # E: Incompatible types in assignment (expression has type Callable[[Any], Any], variable has type Callable[[], None])
+
+g1 = a.f
+a = a.f(a)
+
+class B:
+    def f(self, x):
+        pass
+class A(B):
+    def g(self) -> None:
+        a = self.f(a)
+
+[case testEmptyReturnWithImplicitSignature]
+import typing
+def f():
+    return
+class A:
+    def g(self):
+        return
+
+[case testVarArgsWithImplicitSignature]
+from typing import Any
+o = None # type: Any
+def f(x, *a): pass
+f() # E: Too few arguments for "f"
+f(o)
+f(o, o)
+f(o, o, o)
+[builtins fixtures/list.pyi]
+
+
+-- Implicit types for constructors
+-- -------------------------------
+
+
+[case testInitMethodWithImplicitSignature]
+from typing import Callable
+f1 = None # type: Callable[[A], A]
+f2 = None # type: Callable[[A, A], A]
+a = None # type: A
+
+A(a)   # Fail
+f1 = A # Fail
+
+A(a, a)
+f2 = A
+
+class A:
+  def __init__(self, a, b): pass
+[out]
+main:6: error: Too few arguments for "A"
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type Callable[[A], A])
+
+[case testUsingImplicitTypeObjectWithIs]
+
+t = None # type: type
+t = A
+t = B
+
+class A: pass
+class B:
+    def __init__(self): pass
+
+
+-- Type compatibility
+-- ------------------
+
+
+[case testTupleTypeCompatibility]
+from typing import Any, Tuple
+t1 = None # type: Tuple[Any, A]
+t2 = None # type: Tuple[A, Any]
+t3 = None # type: Tuple[Any, Any]
+t4 = None # type: Tuple[A, A]
+t5 = None # type: Tuple[Any, Any, Any]
+
+t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[Any, Any, Any]", variable has type "Tuple[Any, Any]")
+t5 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[Any, Any, Any]")
+
+t1 = t1
+t1 = t2
+t1 = t3
+t1 = t4
+t2 = t1
+t2 = t3
+t2 = t4
+t3 = t1
+t3 = t2
+t3 = t4
+t4 = t1
+t4 = t2
+t4 = t3
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testFunctionTypeCompatibilityAndReturnTypes]
+from typing import Any, Callable
+f1 = None # type: Callable[[], Any]
+f11 = None # type: Callable[[], Any]
+f2 = None # type: Callable[[], A]
+f3 = None # type: Callable[[], None]
+
+f2 = f3 # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[], A])
+
+f1 = f2
+f1 = f3
+f2 = f11
+f3 = f11
+
+class A: pass
+
+[case testFunctionTypeCompatibilityAndArgumentTypes]
+from typing import Any, Callable
+f1 = None # type: Callable[[A, Any], None]
+f2 = None # type: Callable[[Any, A], None]
+f3 = None # type: Callable[[A, A], None]
+
+f1 = f1
+f1 = f2
+f1 = f3
+
+f2 = f1
+f2 = f2
+f2 = f3
+
+f3 = f1
+f3 = f2
+f3 = f3
+
+class A: pass
+
+[case testFunctionTypeCompatibilityAndArgumentCounts]
+from typing import Any, Callable
+f1 = None # type: Callable[[Any], None]
+f2 = None # type: Callable[[Any, Any], None]
+
+f1 = f2 # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+
+
+-- Overriding
+-- ----------
+
+
+[case testOverridingMethodWithDynamicTypes]
+from typing import Any
+a, b = None, None # type: (A, B)
+
+b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+a = a.f(b)
+
+class B:
+    def f(self, x: 'A') -> 'B':
+        pass
+    def g(self, x: 'B') -> None:
+        pass
+class A(B):
+    def f(self, x: Any) -> Any:
+        pass
+    def g(self, x: Any) -> None:
+        pass
+
+[case testOverridingMethodWithImplicitDynamicTypes]
+
+a, b = None, None # type: (A, B)
+
+b.f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+a = a.f(b)
+
+class B:
+    def f(self, x: 'A') -> 'B':
+        pass
+    def g(self, x: 'B') -> None:
+        pass
+class A(B):
+    def f(self, x):
+        pass
+    def g(self, x):
+        pass
+
+[case testOverridingMethodAcrossHierarchy]
+import typing
+class C:
+    def f(self, a: 'A') -> None: pass
+class B(C):
+    def f(self, a): pass
+class A(B):
+    def f(self, a: 'D') -> None: # E: Argument 1 of "f" incompatible with supertype "C"
+        pass
+class D: pass
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature1]
+import typing
+class B:
+    def f(self, x: A) -> None: pass
+class A(B):
+    def f(self, x, y): # dynamic function not type checked
+        x()
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature2]
+import typing
+class B:
+    def f(self, x, y): pass
+class A(B):
+    def f(self, x: 'A') -> None: # E: Signature of "f" incompatible with supertype "B"
+        pass
+[out]
+
+[case testInvalidOverrideArgumentCountWithImplicitSignature3]
+import typing
+class B:
+    def f(self, x: A) -> None: pass
+class A(B):
+    def f(self, x, y) -> None: # E: Signature of "f" incompatible with supertype "B"
+        x()
+[out]
+
+
+-- Don't complain about too few/many arguments in dynamic functions
+-- ----------------------------------------------------------------
+
+[case testTooManyArgsInDynamic]
+def f() -> None: pass
+def g():
+    f(1) # Silent
+[out]
+
+[case testTooFewArgsInDynamic]
+def f(a: int) -> None: pass
+def g():
+    f() # Silent
+[out]
+
+[case testJustRightInDynamic]
+def f(a: int) -> None: pass
+def g():
+    f('') # Silent
+[out]
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
new file mode 100644
index 0000000..8d22174
--- /dev/null
+++ b/test-data/unit/check-expressions.test
@@ -0,0 +1,1652 @@
+-- Test cases for simple expressions.
+--
+-- See also:
+--  * check-functions.test contains test cases for calls.
+--  * check-varargs.test contains test cases for *args.
+--  * check-dynamic.test contains test cases related to 'Any' type.
+--  * check-generics.test contains test cases for generic values.
+
+
+-- None expression
+-- ---------------
+
+
+[case testNoneAsRvalue]
+import typing
+a = None # type: A
+class A: pass
+[out]
+
+[case testNoneAsArgument]
+import typing
+def f(x: 'A', y: 'B') -> None: pass
+f(None, None)
+class A: pass
+class B(A): pass
+[out]
+
+
+-- Simple expressions
+-- ------------------
+
+
+[case testIntLiteral]
+a = 0
+b = None # type: A
+b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "A")
+a = 1
+class A:
+    pass
+
+[case testStrLiteral]
+a = ''
+b = None # type: A
+b = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+a = 'x'
+a = r"x"
+a = """foo"""
+class A:
+    pass
+
+[case testFloatLiteral]
+a = 0.0
+b = None # type: A
+b = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "A")
+a = 1.1
+class A:
+    pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class float: pass
+class str: pass
+
+[case testComplexLiteral]
+a = 0.0j
+b = None # type: A
+b = 1.1j # E: Incompatible types in assignment (expression has type "complex", variable has type "A")
+a = 1.1j
+class A:
+    pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class complex: pass
+class str: pass
+
+[case testBytesLiteral]
+b, a = None, None # type: (bytes, A)
+b = b'foo'
+b = br"foo"
+b = b'''foo'''
+a = b'foo' # E: Incompatible types in assignment (expression has type "bytes", variable has type "A")
+class A: pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class type: pass
+class tuple: pass
+class function: pass
+class bytes: pass
+class str: pass
+
+[case testUnicodeLiteralInPython3]
+s = None  # type: str
+s = u'foo'
+b = None  # type: bytes
+b = u'foo' # E: Incompatible types in assignment (expression has type "str", variable has type "bytes")
+[builtins fixtures/primitives.pyi]
+
+
+-- Binary operators
+-- ----------------
+
+
+[case testAdd]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a + c  # Fail
+a = a + b  # Fail
+c = b + a  # Fail
+c = a + b
+
+class A:
+    def __add__(self, x: 'B') -> 'C': pass
+class B: pass
+class C: pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for + ("B")
+[case testAdd]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a + c  # Fail
+a = a + b  # Fail
+c = b + a  # Fail
+c = a + b
+
+class A:
+    def __add__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for + ("B")
+
+[case testSub]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a - c  # Fail
+a = a - b  # Fail
+c = b - a  # Fail
+c = a - b
+
+class A:
+    def __sub__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for - ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for - ("B")
+
+[case testMul]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a * c  # Fail
+a = a * b  # Fail
+c = b * a  # Fail
+c = a * b
+
+class A:
+    def __mul__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for * ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for * ("B")
+
+[case testMatMul]
+a, b, c = None, None, None # type: (A, B, C)
+c = a @ c  # E: Unsupported operand types for @ ("A" and "C")
+a = a @ b  # E: Incompatible types in assignment (expression has type "C", variable has type "A")
+c = b @ a  # E: Unsupported left operand type for @ ("B")
+c = a @ b
+
+class A:
+    def __matmul__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+
+[case testDiv]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a / c  # Fail
+a = a / b  # Fail
+c = b / a  # Fail
+c = a / b
+
+class A:
+    def __truediv__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for / ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for / ("B")
+
+[case testIntDiv]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a // c  # Fail
+a = a // b  # Fail
+c = b // a  # Fail
+c = a // b
+
+class A:
+    def __floordiv__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for // ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for // ("B")
+
+[case testMod]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a % c  # Fail
+a = a % b  # Fail
+c = b % a  # Fail
+c = a % b
+
+class A:
+    def __mod__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for % ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for % ("B")
+
+[case testPow]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a ** c  # Fail
+a = a ** b  # Fail
+c = b ** a  # Fail
+c = a ** b
+
+class A:
+    def __pow__(self, x: 'B') -> 'C':
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Unsupported operand types for ** ("A" and "C")
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Unsupported left operand type for ** ("B")
+
+[case testMiscBinaryOperators]
+
+a, b = None, None # type: (A, B)
+b = a & a  # Fail
+b = a | b  # Fail
+b = a ^ a  # Fail
+b = a << b # Fail
+b = a >> a # Fail
+
+b = a & b
+b = a | a
+b = a ^ b
+b = a << a
+b = a >> b
+class A:
+  def __and__(self, x: 'B') -> 'B': pass
+  def __or__(self, x: 'A') -> 'B': pass
+  def __xor__(self, x: 'B') -> 'B': pass
+  def __lshift__(self, x: 'A') -> 'B': pass
+  def __rshift__(self, x: 'B') -> 'B': pass
+class B: pass
+[out]
+main:3: error: Unsupported operand types for & ("A" and "A")
+main:4: error: Unsupported operand types for | ("A" and "B")
+main:5: error: Unsupported operand types for ^ ("A" and "A")
+main:6: error: Unsupported operand types for << ("A" and "B")
+main:7: error: Unsupported operand types for >> ("A" and "A")
+
+[case testBooleanAndOr]
+
+a, b = None, None # type: (A, bool)
+b = b and b
+b = b or b
+b = b and a # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
+b = a and b # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
+b = b or a  # E: Incompatible types in assignment (expression has type "Union[bool, A]", variable has type "bool")
+b = a or b  # E: Incompatible types in assignment (expression has type "Union[A, bool]", variable has type "bool")
+class A: pass
+
+[builtins fixtures/bool.pyi]
+
+[case testRestrictedTypeAnd]
+
+b = None # type: bool
+i = None # type: str
+j = not b and i
+if j:
+    reveal_type(j) # E: Revealed type is 'builtins.str'
+[builtins fixtures/bool.pyi]
+
+[case testRestrictedTypeOr]
+
+b = None # type: bool
+i = None # type: str
+j = b or i
+if not j:
+    reveal_type(j) # E: Revealed type is 'builtins.str'
+[builtins fixtures/bool.pyi]
+
+[case testAndOr]
+
+s = ""
+b = bool()
+reveal_type(s and b or b)  # E: Revealed type is 'builtins.bool'
+[builtins fixtures/bool.pyi]
+
+[case testNonBooleanOr]
+
+c, d, b = None, None, None # type: (C, D, bool)
+c = c or c
+c = c or d
+c = d or c
+b = c or c # E: Incompatible types in assignment (expression has type "C", variable has type "bool")
+d = c or d # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+d = d or c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+class C: pass
+class D(C): pass
+[builtins fixtures/bool.pyi]
+
+[case testInOperator]
+from typing import Iterator, Iterable, Any
+a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
+c = c in a  # Fail
+a = b in a  # Fail
+c = a in b  # Fail
+c = b in d  # Fail
+c = b in a
+c = a in d
+c = e in d
+c = a in e
+
+class A:
+    def __contains__(self, x: 'B') -> bool: pass
+class B: pass
+class D(Iterable[A]):
+    def __iter__(self) -> Iterator[A]: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for in ("bool" and "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:5: error: Unsupported right operand type for in ("B")
+main:6: error: Unsupported operand types for in ("B" and "D")
+
+[case testNotInOperator]
+from typing import Iterator, Iterable, Any
+a, b, c, d, e = None, None, None, None, None # type: (A, B, bool, D, Any)
+c = c not in a  # Fail
+a = b not in a  # Fail
+c = a not in b  # Fail
+c = b not in d  # Fail
+c = b not in a
+c = a not in d
+c = e in d
+c = a in e
+
+class A:
+    def __contains__(self, x: 'B') -> bool: pass
+class B: pass
+class D(Iterable[A]):
+    def __iter__(self) -> Iterator[A]: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for in ("bool" and "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:5: error: Unsupported right operand type for in ("B")
+main:6: error: Unsupported operand types for in ("B" and "D")
+
+[case testNonBooleanContainsReturnValue]
+
+a, b = None, None # type: (A, bool)
+b = a not in a
+b = a in a
+
+class A:
+  def __contains__(self, x: 'A') -> object: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "bool")
+
+[case testEq]
+
+a, b = None, None # type: (A, bool)
+a = a == b # Fail
+a = a != b # Fail
+b = a == b
+b = a != b
+
+class A:
+  def __eq__(self, o: object) -> bool: pass
+  def __ne__(self, o: object) -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testLtAndGt]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a = a < b # Fail
+a = a > b # Fail
+bo = a < b
+bo = a > b
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testCmp_python2]
+
+a, b, c, bo = None, None, None, None # type: (A, B, C, bool)
+bo = a == a  # E: Unsupported operand types for == ("A" and "A")
+bo = a != a  # E: Argument 1 to "__cmp__" of "A" has incompatible type "A"; expected "B"
+bo = a < b
+bo = a > b
+bo = b <= b
+bo = b <= c
+bo = b >= c  # E: Argument 1 to "__cmp__" of "B" has incompatible type "C"; expected "B"
+bo = a >= b
+bo = c >= b
+bo = c <= b  # E: Argument 1 to "__cmp__" of "C" has incompatible type "B"; expected "A"
+bo = a == c
+bo = b == c  # E: Unsupported operand types for == ("C" and "B")
+
+class A:
+    def __cmp__(self, o):
+      # type: ('B') -> bool
+      pass
+    def __eq__(self, o):
+      # type: ('int') -> bool
+      pass
+class B:
+    def __cmp__(self, o):
+        # type: ('B') -> bool
+        pass
+    def __le__(self, o):
+        # type: ('C') -> bool
+        pass
+class C:
+    def __cmp__(self, o):
+      # type: ('A') -> bool
+      pass
+    def __eq__(self, o):
+      # type: ('int') -> bool
+      pass
+
+[builtins_py2 fixtures/bool.pyi]
+
+[case testCmpIgnoredPy3]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+bo = a <= b # E: Unsupported left operand type for <= ("A")
+
+class A:
+    def __cmp__(self, o: 'B') -> bool: pass
+class B:
+    pass
+
+[builtins fixtures/bool.pyi]
+
+[case testLeAndGe]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a = a <= b # Fail
+a = a >= b # Fail
+bo = a <= b
+bo = a >= b
+
+class A:
+    def __le__(self, o: 'B') -> bool: pass
+    def __ge__(self, o: 'B') -> bool: pass
+class B:
+    def __le__(self, o: 'B') -> bool: pass
+    def __ge__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testChainedComp]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+a < a < b < b # Fail
+a < b < b < b
+a < a > a < b # Fail
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Unsupported operand types for > ("A" and "A")
+main:5: error: Unsupported operand types for > ("A" and "A")
+main:5: error: Unsupported operand types for < ("A" and "A")
+
+
+[case testChainedCompBoolRes]
+
+a, b, bo = None, None, None # type: (A, B, bool)
+bo = a < b < b
+a = a < b < b # Fail
+
+class A:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+class B:
+    def __lt__(self, o: 'B') -> bool: pass
+    def __gt__(self, o: 'B') -> bool: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+
+[case testChainedCompResTyp]
+
+x, y = None, None # type: (X, Y)
+a, b, p, bo = None, None, None, None # type: (A, B, P, bool)
+b = y == y == y
+bo = y == y == y # Fail
+a = x < y
+a = x < y == y # Fail
+p = x < y == y
+
+class P:
+    pass
+class A(P):
+    pass
+class B(P):
+    pass
+
+class X:
+    def __lt__(self, o: 'Y') -> A: pass
+    def __gt__(self, o: 'Y') -> A: pass
+class Y:
+    def __lt__(self, o: 'Y') -> A: pass
+    def __gt__(self, o: 'Y') -> A: pass
+    def __eq__(self, o: 'Y') -> B: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "bool")
+main:7: error: Incompatible types in assignment (expression has type "P", variable has type "A")
+
+
+[case testIs]
+
+a, b = None, None # type: (A, bool)
+a = a is b # Fail
+b = a is b
+b = b is a
+b = a is None
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testIsNot]
+
+a, b = None, None # type: (A, bool)
+a = a is not b # Fail
+b = a is not b
+b = b is not a
+b = a is not None
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testReverseBinaryOperator]
+
+class A:
+    def __add__(self, x: int) -> int: pass
+class B:
+    def __radd__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+n = A() + 1
+s = A() + B()
+n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReverseBinaryOperator2]
+
+class A:
+    def __add__(self, x: 'A') -> object: pass
+class B:
+    def __radd__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+s = A() + B()
+n = A() + B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReverseBinaryOperator3]
+
+class N:
+    def __add__(self, x: 'N') -> object: pass
+class A:
+    def __add__(self, x: N) -> int: pass
+class B:
+    def __radd__(self, x: N) -> str: pass
+s = None  # type: str
+s = A() + B() # E: Unsupported operand types for + ("A" and "B")
+
+[case testBinaryOperatorWithAnyRightOperand]
+from typing import Any, cast
+class A: pass
+A() + cast(Any, 1)
+
+[case testReverseComparisonOperator]
+
+class C:
+    def __gt__(self, x: 'A') -> object: pass
+class A:
+    def __lt__(self, x: C) -> int: pass
+class B:
+    def __gt__(self, x: A) -> str: pass
+s = None  # type: str
+n = None  # type: int
+n = A() < C()
+s = A() < B()
+n = A() < B() # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+s = object() < B() # E: Unsupported operand types for > ("B" and "object")
+
+[case testErrorContextAndBinaryOperators]
+import typing
+class A:
+    def __getitem__(self, i: str) -> int: pass
+def f() -> None:
+    A()[1] # Error
+class B:
+    A()[1] # Error
+A()[1] # Error
+[out]
+main:5: error: Invalid index type "int" for "A"; expected type "str"
+main:7: error: Invalid index type "int" for "A"; expected type "str"
+main:8: error: Invalid index type "int" for "A"; expected type "str"
+
+[case testErrorContextAndBinaryOperators2]
+import m
+[file m.py]
+import typing
+class A:
+    def __getitem__(self, i: str) -> int: pass
+def f() -> None:
+    A()[1] # Error
+class B:
+    A()[1] # Error
+A()[1] # Error
+[out]
+tmp/m.py:5: error: Invalid index type "int" for "A"; expected type "str"
+tmp/m.py:7: error: Invalid index type "int" for "A"; expected type "str"
+tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str"
+
+
+-- Unary operators
+-- ---------------
+
+
+[case testUnaryMinus]
+
+a, b = None, None # type: (A, B)
+a = -a   # Fail
+b = -b   # Fail
+b = -a
+
+class A:
+    def __neg__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for unary - ("B")
+
+[case testUnaryPlus]
+
+a, b = None, None # type: (A, B)
+a = +a   # Fail
+b = +b   # Fail
+b = +a
+
+class A:
+    def __pos__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for unary + ("B")
+
+[case testUnaryNot]
+
+a, b = None, None # type: (A, bool)
+a = not b  # Fail
+b = not a
+b = not b
+class A:
+    pass
+[builtins fixtures/bool.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testUnaryBitwiseNeg]
+
+a, b = None, None # type: (A, B)
+a = ~a   # Fail
+b = ~b   # Fail
+b = ~a
+
+class A:
+    def __invert__(self) -> 'B':
+        pass
+class B:
+    pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:4: error: Unsupported operand type for ~ ("B")
+
+
+-- Indexing
+-- --------
+
+
+[case testIndexing]
+
+a, b, c = None, None, None # type: (A, B, C)
+c = a[c]  # Fail
+a = a[b]  # Fail
+c = b[a]  # Fail
+c = a[b]
+
+class A:
+    def __getitem__(self, x: 'B') -> 'C':
+        pass
+class B: pass
+class C: pass
+[out]
+main:3: error: Invalid index type "C" for "A"; expected type "B"
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:5: error: Value of type "B" is not indexable
+
+[case testIndexingAsLvalue]
+
+a, b, c = None, None, None # type: (A, B, C)
+a[c] = c  # Fail
+a[b] = a  # Fail
+b[a] = c  # Fail
+a[b] = c
+
+class A:
+    def __setitem__(self, x: 'B', y: 'C') -> None:
+        pass
+class B:
+    pass
+class C:
+    pass
+[out]
+main:3: error: Invalid index type "C" for "A"; expected type "B"
+main:4: error: Incompatible types in assignment (expression has type "A", target has type "C")
+main:5: error: Unsupported target for indexed assignment
+
+[case testOverloadedIndexing]
+
+from typing import overload
+
+a, b, c = None, None, None  # type: (A, B, C)
+a[b]
+a[c]
+a[1]  # E: No overload variant of "__getitem__" of "A" matches argument types [builtins.int]
+
+i, s = None, None  # type: (int, str)
+i = a[b]
+s = a[b]  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i = a[c]  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+s = a[c]
+
+class A:
+    @overload
+    def __getitem__(self, x: 'B') -> int:
+        pass
+    @overload
+    def __getitem__(self, x: 'C') -> str:
+        pass
+class B: pass
+class C: pass
+[out]
+
+
+-- Cast expression
+-- ---------------
+
+
+[case testCastExpressions]
+from typing import cast, Any
+class A: pass
+class B: pass
+class C(A): pass
+a, b, c = None, None, None # type: (A, B, C)
+
+a = cast(A, a())       # E: "A" not callable
+a = cast(Any, a())     # E: "A" not callable
+b = cast(A, a)         # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = cast(A, b)
+a = cast(A, a)
+c = cast(C, a)
+a = cast(A, c)
+a = cast(Any, b)
+b = cast(Any, a)
+[out]
+
+[case testAnyCast]
+from typing import cast, Any
+a, b = None, None # type: (A, B)
+a = cast(Any, a())     # Fail
+a = cast(Any, b)
+b = cast(Any, a)
+class A: pass
+class B: pass
+[out]
+main:3: error: "A" not callable
+
+
+-- None return type
+-- ----------------
+
+
+[case testNoneReturnTypeBasics]
+
+a, o = None, None # type: (A, object)
+a = f()         # Fail
+o = A().g(a)    # Fail
+A().g(f())      # Fail
+x = f() # type: A # Fail
+f()
+A().g(a)
+
+def f() -> None:
+    pass
+
+class A:
+    def g(self, x: object) -> None:
+        pass
+[out]
+main:3: error: "f" does not return a value
+main:4: error: "g" of "A" does not return a value
+main:5: error: "f" does not return a value
+main:6: error: "f" does not return a value
+
+[case testNoneReturnTypeWithStatements]
+import typing
+if f():   # Fail
+    pass
+elif f(): # Fail
+    pass
+while f(): # Fail
+    pass
+def g() -> object:
+    return f() # Fail
+raise f() # Fail
+
+def f() -> None: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:2: error: "f" does not return a value
+main:4: error: "f" does not return a value
+main:6: error: "f" does not return a value
+main:9: error: "f" does not return a value
+main:10: error: "f" does not return a value
+
+[case testNoneReturnTypeWithExpressions]
+from typing import cast
+a = None # type: A
+[f()]       # E: "f" does not return a value
+f() + a     # E: "f" does not return a value
+a + f()     # E: "f" does not return a value
+f() == a    # E: "f" does not return a value
+a != f()    # E: Unsupported left operand type for != ("A")
+cast(A, f()) # E: "f" does not return a value
+f().foo     # E: "f" does not return a value
+
+def f() -> None: pass
+class A:
+    def __add__(self, x: 'A') -> 'A': pass
+[builtins fixtures/list.pyi]
+
+[case testNoneReturnTypeWithExpressions2]
+
+a, b = None, None # type: (A, bool)
+a < f()    # E: Unsupported left operand type for < ("A")
+f() <= a   # E: "f" does not return a value
+f() in a   # E: Unsupported right operand type for in ("A")
+a in f()   # E: "f" does not return a value
+-f()       # E: "f" does not return a value
+not f()    # E: "f" does not return a value
+f() and b  # E: "f" does not return a value
+b or f()   # E: "f" does not return a value
+
+def f() -> None: pass
+class A:
+    def __add__(self, x: 'A') -> 'A':
+        pass
+[builtins fixtures/bool.pyi]
+
+
+-- Slicing
+-- -------
+
+
+[case testGetSlice]
+
+a, b = None, None # type: (A, B)
+a = a[1:2] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[1:]  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[:2]  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a[:]   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+b = a[1:2]
+b = a[1:]
+b = a[:2]
+b = a[:]
+
+class A:
+  def __getitem__(self, s: slice) -> 'B': pass
+class B: pass
+[builtins fixtures/slice.pyi]
+
+[case testSlicingWithInvalidBase]
+
+a = None # type: A
+a[1:2] # E: Invalid index type "slice" for "A"; expected type "int"
+a[:]   # E: Invalid index type "slice" for "A"; expected type "int"
+class A:
+  def __getitem__(self, n: int) -> 'A': pass
+[builtins fixtures/slice.pyi]
+
+[case testSlicingWithNonindexable]
+
+o = None # type: object
+o[1:2] # E: Value of type "object" is not indexable
+o[:]   # E: Value of type "object" is not indexable
+[builtins fixtures/slice.pyi]
+
+[case testNonIntSliceBounds]
+from typing import Any
+a, o = None, None # type: (Any, object)
+a[o:1] # E: Slice index must be an integer or None
+a[1:o] # E: Slice index must be an integer or None
+a[o:]  # E: Slice index must be an integer or None
+a[:o]  # E: Slice index must be an integer or None
+[builtins fixtures/slice.pyi]
+
+[case testNoneSliceBounds]
+from typing import Any
+a = None # type: Any
+a[None:1]
+a[1:None]
+a[None:]
+a[:None]
+[builtins fixtures/slice.pyi]
+
+
+-- String interpolation
+-- --------------------
+
+
+[case testStringInterpolationType]
+from typing import Tuple
+i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int])
+'%d' % i
+'%f' % f
+'%s' % s
+'%d' % (f,)
+'%d' % (s,) # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%d' % t
+'%d' % s  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%f' % s  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationSAcceptsAnyType]
+from typing import Any
+i, o, s = None, None, None # type: (int, object, str)
+'%s %s %s' % (i, o, s)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationCount]
+'%d %d' % 1  # E: Not enough arguments for format string
+'%d %d' % (1, 2)
+'%d %d' % (1, 2, 3)  # E: Not all arguments converted during string formatting
+t = 1, 's'
+'%d %s' % t
+'%s %d' % t  # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+'%d' % t  # E: Not all arguments converted during string formatting
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationWithAnyType]
+from typing import Any
+a = None # type: Any
+'%d %d' % a
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationInvalidPlaceholder]
+'%W' % 1  # E: Unsupported format character 'W'
+
+[case testStringInterpolationWidth]
+'%2f' % 3.14
+'%*f' % 3.14 # E: Not enough arguments for format string
+'%*f' % (4, 3.14)
+'%*f' % (1.1, 3.14) # E: * wants int
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationPrecision]
+'%.2f' % 3.14
+'%.*f' % 3.14 # E: Not enough arguments for format string
+'%.*f' % (4, 3.14)
+'%.*f' % (1.1, 3.14) # E: * wants int
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationWidthAndPrecision]
+'%4.2f' % 3.14
+'%4.*f' % 3.14 # E: Not enough arguments for format string
+'%*.2f' % 3.14 # E: Not enough arguments for format string
+'%*.*f' % 3.14 # E: Not enough arguments for format string
+'%*.*f' % (4, 2, 3.14)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationFlagsAndLengthModifiers]
+'%04hd' % 1
+'%-.4ld' % 1
+'%+*Ld' % (1, 1)
+'% .*ld' % (1, 1)
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationDoublePercentage]
+'%% %d' % 1
+'%3% %d' % 1
+'%*%' % 1
+'%*% %d' % 1  # E: Not enough arguments for format string
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationC]
+'%c' % 1
+'%c' % 's'
+'%c' % ''  # E: %c requires int or char
+'%c' % 'ab'  # E: %c requires int or char
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationMappingTypes]
+'%(a)d %(b)s' % {'a': 1, 'b': 's'}
+'%(a)d %(b)s' % {'a': 's', 'b': 1}  # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float]")
+[builtins fixtures/primitives.pyi]
+
+[case testStringInterpolationMappingKeys]
+'%()d' % {'': 2}
+'%(a)d' % {'a': 1, 'b': 2, 'c': 3}
+'%(q)d' % {'a': 1, 'b': 2, 'c': 3}  # E: Key 'q' not found in mapping
+'%(a)d %%' % {'a': 1}
+
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingDictTypes]
+from typing import Any, Dict
+a = None # type: Any
+ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int]
+'%(a)' % 1  # E: Format requires a mapping (expression has type "int", expected type for mapping is Dict[Any, Any])
+'%()d' % a
+'%()d' % ds
+'%()d' % do
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingInvalidDictTypes-skip]
+from typing import Any, Dict
+di = None # type: Dict[int, int]
+'%()d' % di  # E: Format requires a mapping (expression has type Dict[int, int], expected type for mapping is Dict[str, Any])
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationMappingInvalidSpecifiers]
+'%(a)d %d' % 1  # E: String interpolation mixes specifier with and without mapping keys
+'%(b)*d' % 1  # E: String interpolation contains both stars and mapping keys
+'%(b).*d' % 1  # E: String interpolation contains both stars and mapping keys
+
+[case testStringInterpolationMappingFlagsAndLengthModifiers]
+'%(a)1d' % {'a': 1}
+'%(a).1d' % {'a': 1}
+'%(a)#1.1ld' % {'a': 1}
+[builtins fixtures/dict.pyi]
+
+[case testStringInterpolationFloatPrecision]
+'%.f' % 1.2
+'%.3f' % 1.2
+'%.f' % 'x'
+'%.3f' % 'x'
+[builtins fixtures/primitives.pyi]
+[out]
+main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float]")
+
+[case testStringInterpolationSpaceKey]
+'%( )s' % {' ': 'foo'}
+
+[case testByteByteInterpolation]
+def foo(a: bytes, b: bytes):
+    b'%s:%s' % (a, b)
+foo(b'a', b'b') == b'a:b'
+
+[case testBytePercentInterpolationSupported]
+b'%s' % (b'xyz',)
+b'%(name)s' % {'name': 'jane'}
+b'%c' % (123)
+
+[case testUnicodeInterpolation_python2]
+u'%s' % (u'abc',)
+
+-- Lambdas
+-- -------
+
+
+[case testTrivialLambda]
+from typing import Callable
+f = lambda: 1 # type: Callable[[], int]
+f = lambda: ''.x
+f = lambda: ''
+[out]
+main:3: error: "str" has no attribute "x"
+main:4: error: Incompatible types in assignment (expression has type Callable[[], str], variable has type Callable[[], int])
+main:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testVoidLambda]
+import typing
+def void() -> None:
+    pass
+x = lambda: void() # type: typing.Callable[[], None]
+
+
+-- List comprehensions
+-- -------------------
+
+
+[case testSimpleListComprehension]
+from typing import List
+a = None # type: List[A]
+a = [x for x in a]
+b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionNestedTuples]
+from typing import List, Tuple
+l = None # type: List[Tuple[A, Tuple[A, B]]]
+a = [a2 for a1, (a2, b1) in l] # type: List[A]
+b = [a2 for a1, (a2, b1) in l] # type: List[B] # E: List comprehension has incompatible type List[A]
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionNestedTuples2]
+from typing import List, Tuple
+l = None # type: List[Tuple[int, Tuple[int, str]]]
+a = [f(d) for d, (i, s) in l]
+b = [f(s) for d, (i, s) in l] # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+def f(x: int): pass
+[builtins fixtures/for.pyi]
+
+[case testListComprehensionWithNonDirectMapping]
+from typing import List
+a = None # type: List[A]
+b = None # type: List[B]
+b = [f(x) for x in a]
+a = [f(x) for x in a] # E: List comprehension has incompatible type List[B]
+([f(x) for x in b])   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/for.pyi]
+
+[case testErrorInListComprehensionCondition]
+from typing import List
+a = None # type: List[A]
+a = [x for x in a if x()] # E: "A" not callable
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testTypeInferenceOfListComprehension]
+from typing import List
+a = None # type: List[A]
+o = [x for x in a] # type: List[object]
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testSimpleListComprehensionInClassBody]
+from typing import List
+class A:
+    a = None # type: List[A]
+    a = [x for x in a]
+    b = [x for x in a] # type: List[B] # E: List comprehension has incompatible type List[A]
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Set comprehension
+-- -----------------
+
+
+[case testSimpleSetComprehension]
+from typing import Set
+a = None # type: Set[A]
+a = {x for x in a}
+b = {x for x in a} # type: Set[B] # E: Set comprehension has incompatible type Set[A]
+class A: pass
+class B: pass
+[builtins fixtures/set.pyi]
+
+
+-- Dictionary comprehension
+-- ------------------------
+
+
+[case testSimpleDictionaryComprehension]
+from typing import Dict, List, Tuple
+abd = None # type: Dict[A, B]
+abl = None # type: List[Tuple[A, B]]
+abd = {a: b for a, b in abl}
+x = {a: b for a, b in abl} # type: Dict[B, A]
+y = {a: b for a, b in abl} # type: A
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+[out]
+main:5: error: Key expression in dictionary comprehension has incompatible type "A"; expected type "B"
+main:5: error: Value expression in dictionary comprehension has incompatible type "B"; expected type "A"
+main:6: error: Incompatible types in assignment (expression has type Dict[A, B], variable has type "A")
+
+
+[case testDictionaryComprehensionWithNonDirectMapping]
+from typing import Dict, List, Tuple
+abd = None # type: Dict[A, B]
+abl = None # type: List[Tuple[A, B]]
+abd = {a: f(b) for a, b in abl}
+class A: pass
+class B: pass
+class C: pass
+def f(b: A) -> C: pass
+[builtins fixtures/dict.pyi]
+[out]
+main:4: error: Value expression in dictionary comprehension has incompatible type "C"; expected type "B"
+main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+
+
+-- Generator expressions
+-- ---------------------
+
+
+[case testSimpleGeneratorExpression]
+from typing import Iterator
+# The implementation is mostly identical to list comprehensions, so a single
+# test case is ok.
+a = None # type: Iterator[int]
+a = (x for x in a)
+b = None # type: Iterator[str]
+b = (x for x in a) # E: Generator has incompatible item type "int"
+[builtins fixtures/for.pyi]
+
+
+-- Conditional expressions
+-- -----------------------
+
+
+[case testSimpleConditionalExpression]
+import typing
+y = ''
+x = 1 if y else 2
+x = 3
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testConditionalExpressionWithEmptyCondition]
+import typing
+def f() -> None: pass
+x = 1 if f() else 2 # E: "f" does not return a value
+
+[case testConditionalExpressionWithSubtyping]
+import typing
+class A: pass
+class B(A): pass
+x = B() if bool() else A()
+x = A()
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+y = A() if bool() else B()
+y = A()
+y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+[builtins fixtures/bool.pyi]
+
+[case testConditionalExpressionAndTypeContext]
+import typing
+x = [1] if bool() else []
+x = [1]
+x = ['x'] # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testOperationsWithNonInstanceTypes]
+from typing import cast
+class A:
+    def __add__(self, a: 'A') -> 'A': pass
+a = None # type: A
+None + a   # Fail
+f + a      # Fail
+a + f      # Fail
+cast(A, f)
+
+def f() -> None:
+    pass
+[out]
+main:5: error: Unsupported left operand type for + (None)
+main:6: error: Unsupported left operand type for + (Callable[[], None])
+main:7: error: Unsupported operand types for + ("A" and Callable[[], None])
+
+[case testOperatorMethodWithInvalidArgCount]
+
+a = None # type: A
+a + a  # Fail
+
+class A:
+    def __add__(self) -> 'A':
+        pass
+[out]
+main:3: error: Too many arguments for "__add__" of "A"
+
+[case testOperatorMethodAsVar]
+from typing import Any
+class A:
+    def __init__(self, _add: Any) -> None:
+        self.__add__ = _add
+a = None # type: A
+a + a
+[out]
+
+[case testOperatorMethodAsVar2]
+
+class A:
+    def f(self, x: int) -> str: pass
+    __add__ = f
+s = None  # type: str
+s = A() + 1
+A() + (A() + 1)
+[out]
+main:7: error: Argument 1 has incompatible type "str"; expected "int"
+
+[case testIndexedLvalueWithSubtypes]
+
+a, b, c = None, None, None # type: (A, B, C)
+a[c] = c
+a[b] = c
+a[c] = b
+
+class A:
+    def __setitem__(self, x: 'B', y: 'B') -> None:
+        pass
+class B:
+    pass
+class C(B):
+    pass
+[out]
+
+
+-- Ellipsis
+-- --------
+
+
+[case testEllipsis]
+
+a = None # type: A
+a = ...  # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "A")
+b = ...
+c = ...
+b = c
+....__class__
+....a  # E: "ellipsis" has no attribute "a"
+
+class A: pass
+[file builtins.py]
+class object:
+    def __init__(self): pass
+class ellipsis:
+    def __init__(self): pass
+    __class__ = object()
+class type: pass
+class function: pass
+class str: pass
+[out]
+
+
+-- Yield expression
+-- ----------------
+
+
+[case testYieldExpression]
+def f(x: int) -> None:
+    x = yield f('')
+    x = 1
+[builtins fixtures/for.pyi]
+[out]
+main:1: error: The return type of a generator function should be "Generator" or one of its supertypes
+main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testYieldExpressionWithNone]
+from typing import Iterator
+def f(x: int) -> Iterator[None]:
+    (yield)
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Yield from expression
+-- ---------------------
+
+
+[case testYieldFromIteratorHasNoValue]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 5
+def g() -> Iterator[int]:
+    a = yield from f()
+[out]
+main:5: error: Function does not return a value
+
+[case testYieldFromGeneratorHasValue]
+from typing import Iterator, Generator
+def f() -> Generator[int, None, str]:
+    yield 5
+    return "ham"
+def g() -> Iterator[int]:
+    a = "string"
+    a = yield from f()
+[out]
+
+
+-- dict(...)
+-- ---------
+
+
+-- Note that the stub used in unit tests does not have all overload
+-- variants, but it should not matter.
+
+[case testDictWithKeywordArgsOnly]
+from typing import Dict, Any
+d1 = dict(a=1, b=2) # type: Dict[str, int]
+d2 = dict(a=1, b='') # type: Dict[str, int] # E: List item 1 has incompatible type "Tuple[str, str]"
+d3 = dict(a=1) # type: Dict[int, int] # E: List item 0 has incompatible type "Tuple[str, int]"
+d4 = dict(a=1, b=1)
+d4.xyz # E: Dict[str, int] has no attribute "xyz"
+d5 = dict(a=1, b='') # type: Dict[str, Any]
+[builtins fixtures/dict.pyi]
+
+[case testDictWithoutKeywordArgs]
+from typing import Dict
+d = dict() # E: Need type annotation for variable
+d2 = dict() # type: Dict[int, str]
+dict(undefined) # E: Name 'undefined' is not defined
+[builtins fixtures/dict.pyi]
+
+[case testDictFromList]
+from typing import Dict
+d = dict([(1, 'x'), (2, 'y')])
+d() # E: Dict[int, str] not callable
+d2 = dict([(1, 'x')]) # type: Dict[str, str] # E: List item 0 has incompatible type "Tuple[int, str]"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg]
+from typing import Dict
+it = [('x', 1)]
+
+d = dict(it, x=1)
+d() # E: Dict[str, int] not callable
+
+d2 = dict(it, x='') # E: Cannot infer type argument 2 of "dict"
+d2() # E: Dict[Any, Any] not callable
+
+d3 = dict(it, x='') # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg2]
+it = [(1, 'x')]
+dict(it, x='y') # E: Keyword argument only valid with "str" key type in call to "dict"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndKeywordArg3]
+d = dict([], x=1)
+d() # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndStarStarArgs]
+from typing import Dict
+it = [('x', 1)]
+
+kw = {'x': 1}
+d = dict(it, **kw)
+d() # E: Dict[str, int] not callable
+
+kw2 = {'x': ''}
+d2 = dict(it, **kw2) # E: Cannot infer type argument 2 of "dict"
+d2() # E: Dict[Any, Any] not callable
+
+d3 = dict(it, **kw2) # type: Dict[str, int] # E: Argument 2 to "dict" has incompatible type **Dict[str, str]; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testDictFromIterableAndStarStarArgs2]
+it = [(1, 'x')]
+kw = {'x': 'y'}
+d = dict(it, **kw) # E: Keyword argument only valid with "str" key type in call to "dict"
+d() # E: Dict[int, str] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUserDefinedClassNamedDict]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+class dict(Generic[T, S]):
+    def __init__(self, x: T, **kwargs: T) -> None: pass
+dict(1, y=1)
+[builtins fixtures/dict.pyi]
+
+[case testSpecialSignatureForSubclassOfDict]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class D1(dict): pass # Implicit base class Dict[Any, Any]
+D1([(1, 2)], x=1)
+class D2(Dict[T, S], Generic[T, S]): pass
+da = D2([('x', 2)], x=1)
+da() # E: D2[str, int] not callable
+D2([(1, 2)], x=1) # E: Keyword argument only valid with "str" key type in call to "dict"
+db = D2(x=1)
+db() # E: D2[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testSpecialSignatureForSubclassOfDict2]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+class D(Dict[str, T], Generic[T]): pass
+D([('x', 1)], x=1)
+[builtins fixtures/dict.pyi]
+
+[case testOverridingSpecialSignatureInSubclassOfDict]
+from typing import TypeVar, Dict, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class D(Dict[T, S], Generic[T, S]):
+    def __init__(self, x: S, y: T) -> None: pass
+d = D(1, y='')
+d() # E: D[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testRevealType]
+reveal_type(1) # E: Revealed type is 'builtins.int'
+
+[case testUndefinedRevealType]
+reveal_type(x)
+[out]
+main:1: error: Revealed type is 'Any'
+main:1: error: Name 'x' is not defined
+
+[case testUserDefinedRevealType]
+def reveal_type(x: int) -> None: pass
+reveal_type("foo") # E: Argument 1 to "reveal_type" has incompatible type "str"; expected "int"
+
+[case testRevealTypeVar]
+reveal_type = 1
+1 + "foo" # E: Unsupported operand types for + ("int" and "str")
+
+[case testRevealForward]
+def f() -> None:
+    reveal_type(x)
+x = 1 + 1
+[out]
+main:2: error: Revealed type is 'builtins.int'
+
+[case testEqNone]
+None == None
+[builtins fixtures/ops.pyi]
+
+[case testLtNone]
+None < None  # E: Unsupported left operand type for < (None)
+[builtins fixtures/ops.pyi]
+
+[case testDictWithStarExpr]
+# flags: --fast-parser
+b = {'z': 26, *a}  # E: invalid syntax
+[builtins fixtures/dict.pyi]
+
+[case testDictWithStarStarExpr]
+# flags: --fast-parser
+from typing import Dict
+a = {'a': 1}
+b = {'z': 26, **a}
+c = {**b}
+d = {**a, **b, 'c': 3}
+e = {1: 'a', **a}  # E: Argument 1 to "update" of "dict" has incompatible type Dict[str, int]; expected Mapping[int, str]
+f = {**b}  # type: Dict[int, int]  # E: List item 0 has incompatible type Dict[str, int]
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test
new file mode 100644
index 0000000..f982838
--- /dev/null
+++ b/test-data/unit/check-fastparse.test
@@ -0,0 +1,301 @@
+[case testFastParseSyntaxError]
+# flags: --fast-parser
+1 +  # E: invalid syntax
+
+[case testFastParseTypeCommentSyntaxError]
+# flags: --fast-parser
+x = None # type: a : b  # E: syntax error in type comment
+
+[case testFastParseInvalidTypeComment]
+# flags: --fast-parser
+x = None # type: a + b  # E: invalid type comment
+
+-- Function type comments are attributed to the function def line.
+-- This happens in both parsers.
+[case testFastParseFunctionAnnotationSyntaxError]
+# flags: --fast-parser
+def f():  # E: syntax error in type comment
+  # type: None -> None
+  pass
+
+[case testFastParseInvalidFunctionAnnotation]
+# flags: --fast-parser
+def f(x):  # E: invalid type comment
+  # type: (a + b) -> None
+  pass
+
+[case testFastParseProperty]
+# flags: --fast-parser
+class C:
+  @property
+  def x(self) -> str: pass
+  @x.setter
+  def x(self, value: str) -> None: pass
+[builtins fixtures/property.pyi]
+
+[case testFastParseConditionalProperty]
+# flags: --fast-parser
+class C:
+  if bool():
+    @property
+    def x(self) -> str: pass
+    @x.setter
+    def x(self, value: str) -> None: pass
+[builtins fixtures/property.pyi]
+
+[case testFastParsePerArgumentAnnotations]
+# flags: --fast-parser
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+class F: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args,    # type: C
+      d = None, # type: D
+      e,        # type: E
+      **kwargs  # type: F
+      ):
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is '__main__.B'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    reveal_type(d)      # E: Revealed type is '__main__.D'
+    reveal_type(e)      # E: Revealed type is '__main__.E'
+    reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturn]
+# flags: --fast-parser
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+class F: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args,    # type: C
+      d = None, # type: D
+      e,        # type: E
+      **kwargs  # type: F
+      ):
+      # type: (...) -> int
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is '__main__.B'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    reveal_type(d)      # E: Revealed type is '__main__.D'
+    reveal_type(e)      # E: Revealed type is '__main__.E'
+    reveal_type(kwargs) # E: Revealed type is 'builtins.dict[builtins.str, __main__.F]'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithAnnotatedBareStar]
+# flags: --fast-parser
+def f(*, # type: int  # E: bare * has associated type comment
+      x  # type: str
+      ):
+      # type: (...) -> int
+    pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturnAndBareStar]
+# flags: --fast-parser
+def f(*,
+      x  # type: str
+      ):
+      # type: (...) -> int
+    reveal_type(x) # E: Revealed type is 'builtins.str'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotations_python2]
+# flags: --fast-parser
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args     # type: C
+      # kwargs not tested due to lack of 2.7 dict fixtures
+      ):
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is '__main__.B'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFastParsePerArgumentAnnotationsWithReturn_python2]
+# flags: --fast-parser
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+def f(a,        # type: A
+      b = None, # type: B
+      *args     # type: C
+      # kwargs not tested due to lack of 2.7 dict fixtures
+      ):
+      # type: (...) -> int
+    reveal_type(a)      # E: Revealed type is '__main__.A'
+    reveal_type(b)      # E: Revealed type is '__main__.B'
+    reveal_type(args)   # E: Revealed type is 'builtins.tuple[__main__.C]'
+    return "not an int"  # E: Incompatible return value type (got "str", expected "int")
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testFasterParseTooManyArgumentsAnnotation]
+# flags: --fast-parser
+def f():  # E: Type signature has too many arguments
+    # type: (int) -> None
+    pass
+
+[case testFasterParseTooFewArgumentsAnnotation]
+# flags: --fast-parser
+def f(x):  # E: Type signature has too few arguments
+    # type: () -> None
+    pass
+
+[case testFasterParseTypeCommentError_python2]
+# flags: --fast-parser
+from typing import Tuple
+def f(a):
+    # type: (Tuple(int, int)) -> int
+    pass
+[out]
+main:3: error: invalid type comment
+
+[case testFastParseMatMul]
+# flags: --fast-parser
+from typing import Any
+x = None  # type: Any
+x @ 1
+x @= 1
+
+[case testIncorrectTypeCommentIndex]
+# flags: --fast-parser
+from typing import Dict
+x = None # type: Dict[x: y]
+[out]
+main:3: error: syntax error in type comment
+
+[case testPrintStatementTrailingCommaFastParser_python2]
+# flags: --fast-parser
+print 0,
+print 1, 2,
+
+[case testFastParserShowsMultipleErrors]
+def f(x):  # E: Type signature has too few arguments
+    # type: () -> None
+    pass
+def g():  # E: Type signature has too many arguments
+    # type: (int) -> None
+    pass
+
+[case testFastParseMalformedAssert]
+# flags: --fast-parser
+assert 1, 2
+assert (1, 2)  # W: Assertion is always true, perhaps remove parentheses?
+assert (1, 2), 3  # W: Assertion is always true, perhaps remove parentheses?
+assert ()
+assert (1,)  # W: Assertion is always true, perhaps remove parentheses?
+
+[case testFastParseAssertMessage]
+# flags: --fast-parser
+assert 1
+assert 1, 2
+assert 1, 1+2
+assert 1, 1+'test'  # E: Unsupported operand types for + ("int" and "str")
+assert 1, f()  # E: Name 'f' is not defined
+
+[case testFastParserConsistentFunctionTypes]
+# flags: --fast-parser
+def f(x, y, z):
+  # type: (int, int, int) -> int
+  pass
+
+def f(x,  # type: int  # E: Function has duplicate type signatures
+      y,  # type: int
+      z   # type: int
+    ):
+    # type: (int, int, int) -> int
+    pass
+
+def f(x,  # type: int
+      y,  # type: int
+      z   # type: int
+    ):
+    # type: (...) -> int
+    pass
+
+def f(x, y, z):
+  # type: (int, int, int) -> int
+  pass
+
+def f(x) -> int:  # E: Function has duplicate type signatures
+  # type: (int) -> int
+  pass
+
+def f(x: int, y: int, z: int):
+  # type: (...) -> int
+  pass
+
+def f(x: int):  # E: Function has duplicate type signatures
+  # type: (int) -> int
+  pass
+
+[case testFastParserDuplicateNames]
+# flags: --fast-parser
+def f(x, y, z):
+  pass
+
+def g(x, y, x):  # E: duplicate argument 'x' in function definition
+  pass
+
+def h(x, y, *x):  # E: duplicate argument 'x' in function definition
+  pass
+
+def i(x, y, *z, **z):  # E: duplicate argument 'z' in function definition
+  pass
+
+def j(x: int, y: int, *, x: int = 3):  # E: duplicate argument 'x' in function definition
+  pass
+
+def k(*, y, z, y):  # E: duplicate argument 'y' in function definition
+  pass
+
+lambda x, y, x: ...  # E: duplicate argument 'x' in function definition
+
+[case testFastParserDuplicateNames_python2]
+# flags: --fast-parser
+def f(x, y, z):
+  pass
+
+def g(x, y, x):  # E: duplicate argument 'x' in function definition
+  pass
+
+def h(x, y, *x):  # E: duplicate argument 'x' in function definition
+  pass
+
+def i(x, y, *z, **z):  # E: duplicate argument 'z' in function definition
+  pass
+
+def j(x, (y, y), z):  # E: duplicate argument 'y' in function definition
+  pass
+
+def k(x, (y, x)):  # E: duplicate argument 'x' in function definition
+  pass
+
+def l((x, y), (z, x)):  # E: duplicate argument 'x' in function definition
+  pass
+
+def m(x, ((x, y), z)):  # E: duplicate argument 'x' in function definition
+  pass
+
+lambda x, (y, x): None  # E: duplicate argument 'x' in function definition
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
new file mode 100644
index 0000000..9d057d8
--- /dev/null
+++ b/test-data/unit/check-flags.test
@@ -0,0 +1,305 @@
+[case testUnannotatedFunction]
+# flags: --disallow-untyped-defs
+def f(x): pass
+[out]
+main:2: error: Function is missing a type annotation
+
+[case testUnannotatedArgument]
+# flags: --disallow-untyped-defs
+def f(x) -> int: pass
+[out]
+main:2: error: Function is missing a type annotation for one or more arguments
+
+[case testUnannotatedArgumentWithFastParser]
+# flags: --fast-parser --disallow-untyped-defs
+def f(x) -> int: pass
+[out]
+main:2: error: Function is missing a type annotation for one or more arguments
+
+[case testNoArgumentFunction]
+# flags: --disallow-untyped-defs
+def f() -> int: pass
+[out]
+
+[case testUnannotatedReturn]
+# flags: --disallow-untyped-defs
+def f(x: int): pass
+[out]
+main:2: error: Function is missing a return type annotation
+
+[case testUnannotatedReturnWithFastParser]
+# flags: --fast-parser --disallow-untyped-defs
+def f(x: int): pass
+[out]
+main:2: error: Function is missing a return type annotation
+
+[case testLambda]
+# flags: --disallow-untyped-defs
+lambda x: x
+[out]
+
+[case testUntypedDef]
+# flags: --disallow-untyped-defs
+def f():
+    1 + "str"
+[out]
+main:2: error: Function is missing a type annotation
+
+[case testSubclassingAny]
+# flags: --disallow-subclassing-any
+from typing import Any
+FakeClass = None  # type: Any
+class Foo(FakeClass): pass  # E: Class cannot subclass 'FakeClass' (has type 'Any')
+[out]
+
+[case testSubclassingAnyMultipleBaseClasses]
+# flags: --disallow-subclassing-any
+from typing import Any
+FakeClass = None  # type: Any
+class ActualClass: pass
+class Foo(ActualClass, FakeClass): pass  # E: Class cannot subclass 'FakeClass' (has type 'Any')
+[out]
+
+[case testSubclassingAnySilentImports]
+# flags: --disallow-subclassing-any --follow-imports=skip
+# cmd: mypy -m main
+
+[file main.py]
+from ignored_module import BaseClass
+class Foo(BaseClass): pass
+
+[file ignored_module.py]
+class BaseClass: pass
+
+[out]
+tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
+
+[case testSubclassingAnySilentImports2]
+# flags: --disallow-subclassing-any --follow-imports=skip
+# cmd: mypy -m main
+
+[file main.py]
+import ignored_module
+class Foo(ignored_module.BaseClass): pass
+
+[file ignored_module.py]
+class BaseClass: pass
+
+[out]
+tmp/main.py:2: error: Class cannot subclass 'BaseClass' (has type 'Any')
+
+[case testWarnNoReturnIgnoresTrivialFunctions]
+# flags: --warn-no-return
+def f() -> int:
+  pass
+def g() -> int:
+  ...
+def h() -> int:
+  """with docstring"""
+  pass
+def i() -> int:
+  """with docstring"""
+  ...
+def j() -> int:
+  u"""with unicode docstring"""
+  pass
+def k() -> int:
+  """docstring only"""
+
+[case testWarnNoReturnWorksWithAlwaysTrue]
+# flags: --warn-no-return
+PY3 = True
+def f() -> int:
+    if PY3:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testWarnNoReturnWorksWithAlwaysFalse]
+# flags: --warn-no-return
+PY2 = False
+def f() -> int:
+    if PY2:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testWarnNoReturnWorksWithMypyTrue]
+# flags: --warn-no-return
+MYPY = False
+def f() -> int:
+    if MYPY:
+        return 0
+    else:
+        return 0
+[builtins fixtures/bool.pyi]
+
+[case testShowErrorContextFunction]
+# flags: --show-error-context
+def f() -> None:
+  0 + ""
+[out]
+main: note: In function "f":
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextClass]
+# flags: --show-error-context
+class A:
+  0 + ""
+[out]
+main: note: In class "A":
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextMember]
+# flags: --show-error-context
+class A:
+  def f(self, x: int) -> None:
+    self.f("")
+[out]
+main: note: In member "f" of class "A":
+main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testShowErrorContextModule]
+# flags: --show-error-context
+import m
+[file m.py]
+0 + ""
+[out]
+main:2: note: In module imported here:
+tmp/m.py:1: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextTopLevel]
+# flags: --show-error-context
+def f() -> None:
+  0 + ""
+0 + ""
+[out]
+main: note: In function "f":
+main:3: error: Unsupported operand types for + ("int" and "str")
+main: note: At top level:
+main:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testShowErrorContextFromHere]
+# flags: --show-error-context
+import a
+[file a.py]
+import b
+[file b.py]
+0 + ""
+[out]
+tmp/a.py:1: note: In module imported here,
+main:2: note: ... from here:
+tmp/b.py:1: error: Unsupported operand types for + ("int" and "str")
+
+[case testFollowImportsNormal]
+# flags: --follow-imports=normal
+from mod import x
+x + ""
+[file mod.py]
+1 + ""
+x = 0
+[out]
+tmp/mod.py:1: error: Unsupported operand types for + ("int" and "str")
+main:3: error: Unsupported operand types for + ("int" and "str")
+
+[case testFollowImportsSilent]
+# flags: --follow-imports=silent
+from mod import x
+x + ""  # E: Unsupported operand types for + ("int" and "str")
+[file mod.py]
+1 + ""
+x = 0
+
+[case testFollowImportsSkip]
+# flags: --follow-imports=skip
+from mod import x
+x + ""
+[file mod.py]
+this deliberate syntax error will not be reported
+[out]
+
+[case testFollowImportsError]
+# flags: --follow-imports=error
+from mod import x
+x + ""
+[file mod.py]
+deliberate syntax error
+[out]
+main:2: note: Import of 'mod' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testIgnoreMissingImportsFalse]
+from mod import x
+[out]
+main:1: error: Cannot find module named 'mod'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testIgnoreMissingImportsTrue]
+# flags: --ignore-missing-imports
+from mod import x
+[out]
+
+[case testStrictBoolean]
+# flags: --strict-boolean
+if True:
+  pass
+if 'test':  # E: Condition must be a boolean
+  pass
+elif 1:  # E: Condition must be a boolean
+  pass
+
+def f() -> bool:
+  return True
+
+if f:  # E: Condition must be a boolean
+  pass
+
+if f():
+  pass
+
+class A:
+  def __call__(self) -> bool:
+    return False
+
+if A:  # E: Condition must be a boolean
+  pass
+
+if A():  # E: Condition must be a boolean
+  pass
+
+if A()():
+  pass
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanTernary]
+# flags: --strict-boolean
+x = 1 if 'test' else 2  # E: Condition must be a boolean
+y = 1 if not 'test' else 2
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanWhile]
+# flags: --strict-boolean
+while 5:  # E: Condition must be a boolean
+  pass
+
+while False:
+  pass
+[builtins fixtures/bool.pyi]
+
+[case testStrictBooleanComplexTypes]
+# flags: --strict-boolean
+from typing import Any, Type, Union
+
+x = True  # type: Any
+y = True  # type: Union[bool, int]
+z = int  # type: Type[int]
+
+if x:
+  pass
+if y:  # E: Condition must be a boolean
+  pass
+if z:  # E: Condition must be a boolean
+  pass
+[builtins fixtures/bool.pyi]
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
new file mode 100644
index 0000000..5fb8932
--- /dev/null
+++ b/test-data/unit/check-functions.test
@@ -0,0 +1,1666 @@
+-- Test cases for the type checker related to functions, function types and
+-- calls.
+
+-- See also check-varargs.test.
+
+
+-- Callable type basics
+-- --------------------
+
+
+[case testCallingVariableWithFunctionType]
+from typing import Callable
+f = None # type: Callable[[A], B]
+a, b = None, None # type: (A, B)
+a = f(a)    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(b)    # E: Argument 1 has incompatible type "B"; expected "A"
+b = f()     # E: Too few arguments
+b = f(a, a) # E: Too many arguments
+b = f(a)
+
+class A: pass
+class B: pass
+
+[case testKeywordOnlyArgumentOrderInsensitivity]
+import typing
+
+class A(object):
+    def f(self, *, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, *, b: str, a: int) -> None: pass
+
+class C(A):
+    def f(self, *, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+
+[case testPositionalOverridingArgumentNameInsensitivity]
+import typing
+
+class A(object):
+    def f(self, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, b: str, a: int) -> None: pass # E: Argument 1 of "f" incompatible with supertype "A" # E: Argument 2 of "f" incompatible with supertype "A"
+
+class C(A):
+    def f(self, foo: int, bar: str) -> None: pass
+
+
+[case testPositionalOverridingArgumentNamesCheckedWhenMismatchingPos]
+import typing
+
+class A(object):
+    def f(self, a: int, b: str) -> None: pass
+
+class B(A):
+    def f(self, b: int, a: str) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+
+
+[case testSubtypingFunctionTypes]
+from typing import Callable
+
+class A: pass
+class B(A): pass
+
+f = None # type: Callable[[B], A]
+g = None # type: Callable[[A], A]  # subtype of f
+h = None # type: Callable[[B], B]  # subtype of f
+g = h  # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], A])
+h = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[B], B])
+h = g  # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[B], B])
+g = f  # E: Incompatible types in assignment (expression has type Callable[[B], A], variable has type Callable[[A], A])
+f = g
+f = h
+f = f
+g = g
+h = h
+
+[case testSubtypingFunctionsDoubleCorrespondence]
+
+def l(x) -> None: ...
+def r(__, *, x) -> None: ...
+r = l # E: Incompatible types in assignment (expression has type Callable[[Any], None], variable has type Callable[[Any, NamedArg('x', Any)], None])
+
+[case testSubtypingFunctionsRequiredLeftArgNotPresent]
+
+def l(x, y) -> None: ...
+def r(x) -> None: ...
+r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None])
+
+[case testSubtypingFunctionsImplicitNames]
+
+def f(a, b): pass
+def g(c: Any, d: Any) -> Any: pass
+
+ff = f
+gg = g
+
+gg = f
+ff = g
+
+[case testSubtypingFunctionsDefaultsNames]
+from typing import Callable
+
+def f(a: int, b: str) -> None: pass
+f_nonames = None # type: Callable[[int, str], None]
+def g(a: int, b: str = "") -> None: pass
+def h(aa: int, b: str = "") -> None: pass
+
+ff_nonames = f_nonames
+ff = f
+gg = g
+hh = h
+
+ff = gg
+ff_nonames = ff
+ff_nonames = f_nonames # reset
+ff = ff_nonames # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
+ff = f # reset
+gg = ff # E: Incompatible types in assignment (expression has type Callable[[Arg('a', int), Arg('b', str)], None], variable has type Callable[[Arg('a', int), DefaultArg('b', str)], None])
+gg = hh # E: Incompatible types in assignment (expression has type Callable[[Arg('aa', int), DefaultArg('b', str)], None], variable has type Callable[[Arg('a', int), DefaultArg('b', str)], None])
+
+[case testSubtypingFunctionsArgsKwargs]
+from typing import Any, Callable
+
+def everything(*args: Any, **kwargs: Any) -> None: pass
+everywhere = None # type: Callable[..., None]
+
+def specific_1(a: int, b: str) -> None: pass
+def specific_2(a: int, *, b: str) -> None: pass
+
+ss_1 = specific_1
+ss_2 = specific_2
+ee_def = everything
+ee_var = everywhere
+
+ss_1 = ee_def
+ss_1 = specific_1
+ss_2 = ee_def
+ss_2 = specific_2
+ee_def = everywhere
+ee_def = everything
+ee_var = everything
+ee_var = everywhere
+
+ee_var = specific_1 # The difference between Callable[..., blah] and one with a *args: Any, **kwargs: Any is that the ... goes loosely both ways.
+ee_def = specific_1 # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[StarArg(Any), KwArg(Any)], None])
+
+[builtins fixtures/dict.pyi]
+
+[case testLackOfNames]
+def f(__a: int, __b: str) -> None: pass
+def g(a: int, b: str) -> None: pass
+
+ff = f
+gg = g
+
+ff = g
+gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
+
+[case testLackOfNamesFastparse]
+# flags: --fast-parser
+
+def f(__a: int, __b: str) -> None: pass
+def g(a: int, b: str) -> None: pass
+
+ff = f
+gg = g
+
+ff = g
+gg = f # E: Incompatible types in assignment (expression has type Callable[[int, str], None], variable has type Callable[[Arg('a', int), Arg('b', str)], None])
+
+[case testFunctionTypeCompatibilityWithOtherTypes]
+from typing import Callable
+f = None # type: Callable[[], None]
+a, o = None, None # type: (A, object)
+a = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "A")
+f = a   # E: Incompatible types in assignment (expression has type "A", variable has type Callable[[], None])
+f = o   # E: Incompatible types in assignment (expression has type "object", variable has type Callable[[], None])
+f = f() # E: Function does not return a value
+
+f = f
+f = None
+o = f
+
+class A: pass
+
+[case testFunctionSubtypingWithVoid]
+from typing import Callable
+f = None # type: Callable[[], None]
+g = None # type: Callable[[], object]
+f = g  # E: Incompatible types in assignment (expression has type Callable[[], object], variable has type Callable[[], None])
+g = f  # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[], object])
+
+f = f
+g = g
+
+[case testFunctionSubtypingWithMultipleArgs]
+from typing import Callable
+f = None # type: Callable[[A, A], None]
+g = None # type: Callable[[A, B], None]
+h = None # type: Callable[[B, B], None]
+f = g  # E: Incompatible types in assignment (expression has type Callable[[A, B], None], variable has type Callable[[A, A], None])
+f = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, A], None])
+g = h  # E: Incompatible types in assignment (expression has type Callable[[B, B], None], variable has type Callable[[A, B], None])
+g = f
+h = f
+h = g
+f = f
+g = g
+h = h
+
+class A: pass
+class B(A): pass
+
+[case testFunctionTypesWithDifferentArgumentCounts]
+from typing import Callable
+f = None # type: Callable[[], None]
+g = None # type: Callable[[A], None]
+h = None # type: Callable[[A, A], None]
+
+f = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[], None])
+f = h   # E: Incompatible types in assignment (expression has type Callable[[A, A], None], variable has type Callable[[], None])
+h = f   # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type Callable[[A, A], None])
+h = g   # E: Incompatible types in assignment (expression has type Callable[[A], None], variable has type Callable[[A, A], None])
+
+f = f
+g = g
+h = h
+
+class A: pass
+[out]
+
+[case testCompatibilityOfSimpleTypeObjectWithStdType]
+
+t = None # type: type
+a = None # type: A
+
+a = A # E: Incompatible types in assignment (expression has type "A" (type object), variable has type "A")
+t = f # E: Incompatible types in assignment (expression has type Callable[[], None], variable has type "type")
+t = A
+
+class A:
+    def __init__(self, a: 'A') -> None: pass
+
+def f() -> None: pass
+
+[case testFunctionTypesWithOverloads]
+from typing import Callable, overload
+f = None # type: Callable[[AA], A]
+g = None # type: Callable[[B], B]
+h = None # type: Callable[[A], AA]
+
+h = i  # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], AA])
+f = j
+
+f = i
+g = i
+g = j
+
+class A: pass
+class AA(A): pass
+
+class B: pass
+
+@overload
+def i(x: AA) -> A:
+    pass
+@overload
+def i(x: B) -> B:
+    pass
+
+@overload
+def j(x: B) -> B:
+    pass
+@overload
+def j(x: A) -> AA:
+    pass
+
+[case testOverloadWithThreeItems]
+from typing import Callable, overload
+g1 = None # type: Callable[[A], A]
+g2 = None # type: Callable[[B], B]
+g3 = None # type: Callable[[C], C]
+g4 = None # type: Callable[[A], B]
+a, b, c = None, None, None # type: (A, B, C)
+
+b = f(a)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b)  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(c)  # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+g4 = f    # E: Incompatible types in assignment (expression has type overloaded function, variable has type Callable[[A], B])
+
+g1 = f
+g2 = f
+g3 = f
+a = f(a)
+b = f(b)
+c = f(c)
+
+class A: pass
+class B: pass
+class C: pass
+
+ at overload
+def f(x: A) -> A: pass
+ at overload
+def f(x: B) -> B: pass
+ at overload
+def f(x: C) -> C: pass
+
+[case testInferConstraintsUnequalLengths]
+from typing import Any, Callable, List
+def f(fields: List[Callable[[Any], Any]]): pass
+class C: pass
+f([C])  # E: List item 0 has incompatible type
+class D:
+    def __init__(self, a, b): pass
+f([D])  # E: List item 0 has incompatible type
+[builtins fixtures/list.pyi]
+
+-- Default argument values
+-- -----------------------
+
+
+[case testCallingFunctionsWithDefaultArgumentValues]
+
+a, b = None, None # type: (A, B)
+a = f()     # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(b)    # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+b = f(a, a) # E: Too many arguments for "f"
+
+b = f()
+b = f(a)
+b = f(AA())
+
+def f(x: 'A'  =  None) -> 'B': pass
+
+class A: pass
+class AA(A): pass
+class B: pass
+
+[case testDefaultArgumentExpressions]
+import typing
+def f(x: 'A' = A()) -> None:
+    b = x # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+[out]
+
+[case testDefaultArgumentExpressions2]
+import typing
+def f(x: 'A' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = x # type: B      # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x # type: A
+
+class B: pass
+class A: pass
+[out]
+
+[case testDefaultArgumentsWithSubtypes]
+import typing
+def f(x: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    pass
+def g(x: 'A' = B()) -> None:
+    pass
+
+class A: pass
+class B(A): pass
+[out]
+
+[case testMultipleDefaultArgumentExpressions]
+import typing
+def f(x: 'A' = B(), y: 'B' = B()) -> None: # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    pass
+def h(x: 'A' = A(), y: 'B' = B()) -> None:
+    pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testMultipleDefaultArgumentExpressions2]
+import typing
+def g(x: 'A' = A(), y: 'B' = A()) -> None: # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testDefaultArgumentsAndSignatureAsComment]
+import typing
+def f(x = 1): # type: (int) -> str
+    pass
+f()
+f(1)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testMethodDefaultArgumentsAndSignatureAsComment]
+import typing
+class A:
+    def f(self, x = 1): # type: (int) -> str
+        pass
+A().f()
+A().f(1)
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+
+-- Access to method defined as a data attribute
+-- --------------------------------------------
+
+
+[case testMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[A], None]
+    g = x # type: Callable[[A, B], None]
+a = None # type: A
+a.f()
+a.g(B())
+a.f(a) # E: Too many arguments
+a.g()  # E: Too few arguments
+
+[case testMethodWithInvalidMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[], None]
+    g = x # type: Callable[[B], None]
+a = None # type: A
+a.f() # E: Invalid method type
+a.g() # E: Invalid method type
+
+[case testMethodWithDynamicallyTypedMethodAsDataAttribute]
+from typing import Any, Callable
+class B: pass
+x = None # type: Any
+class A:
+    f = x # type: Callable[[Any], Any]
+a = None # type: A
+a.f()
+a.f(a) # E: Too many arguments
+
+[case testOverloadedMethodAsDataAttribute]
+from typing import overload
+class B: pass
+class A:
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, b: B) -> None: pass
+    g = f
+a = None # type: A
+a.g()
+a.g(B())
+a.g(a) # E: No overload variant matches argument types [__main__.A]
+
+[case testMethodAsDataAttributeInferredFromDynamicallyTypedMethod]
+
+class A:
+    def f(self, x): pass
+    g = f
+a = None # type: A
+a.g(object())
+a.g(a, a) # E: Too many arguments
+a.g()     # E: Too few arguments
+
+[case testMethodAsDataAttributeInGenericClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x: t) -> None: pass
+    g = f
+a = None # type: A[B]
+a.g(B())
+a.g(a)   # E: Argument 1 has incompatible type A[B]; expected "B"
+
+[case testInvalidMethodAsDataAttributeInGenericClass]
+from typing import Any, TypeVar, Generic, Callable
+t = TypeVar('t')
+class B: pass
+class C: pass
+x = None # type: Any
+class A(Generic[t]):
+    f = x # type: Callable[[A[B]], None]
+ab = None # type: A[B]
+ac = None # type: A[C]
+ab.f()
+ac.f()   # E: Invalid method type
+
+[case testPartiallyTypedSelfInMethodDataAttribute]
+from typing import Any, TypeVar, Generic, Callable
+t = TypeVar('t')
+class B: pass
+class C: pass
+x = None # type: Any
+class A(Generic[t]):
+    f = x # type: Callable[[A], None]
+ab = None # type: A[B]
+ac = None # type: A[C]
+ab.f()
+ac.f()
+
+[case testCallableDataAttribute]
+from typing import Callable
+class A:
+    g = None # type: Callable[[A], None]
+    def __init__(self, f: Callable[[], None]) -> None:
+        self.f = f
+a = A(None)
+a.f()
+a.g()
+a.f(a) # E: Too many arguments
+a.g(a) # E: Too many arguments
+
+
+-- Nested functions
+-- ----------------
+
+
+[case testSimpleNestedFunction]
+import typing
+def f(a: 'A') -> None:
+    def g(b: 'B') -> None:
+         b = a # fail
+         aa = a # type: A # ok
+         b = B()
+    g(a) # fail
+    g(B())
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:7: error: Argument 1 to "g" has incompatible type "A"; expected "B"
+
+[case testReturnAndNestedFunction]
+import typing
+def f() -> 'A':
+    def g() -> 'B':
+        return A() # fail
+        return B()
+    return B() # fail
+    return A()
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible return value type (got "A", expected "B")
+main:6: error: Incompatible return value type (got "B", expected "A")
+
+[case testDynamicallyTypedNestedFunction]
+import typing
+def f(x: object) -> None:
+    def g(y):
+        pass
+    g() # E: Too few arguments for "g"
+    g(x)
+[out]
+
+[case testNestedFunctionInMethod]
+import typing
+class A:
+    def f(self) -> None:
+        def g(x: int) -> None:
+            y = x # type: int
+            a = x # type: A # fail
+        g(2)
+        g(A()) # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "int", variable has type "A")
+main:8: error: Argument 1 to "g" has incompatible type "A"; expected "int"
+
+[case testMutuallyRecursiveNestedFunctions]
+def f() -> None:
+    def g() -> None:
+        h(1)
+        h('') # E
+    def h(x: int) -> None:
+        g()
+        g(1) # E
+[out]
+main:4: error: Argument 1 to "h" has incompatible type "str"; expected "int"
+main:7: error: Too many arguments for "g"
+
+[case testMutuallyRecursiveDecoratedFunctions]
+from typing import Callable, Any
+def dec(f) -> Callable[..., Any]: pass
+def f() -> None:
+    @dec
+    def g() -> None:
+        h()
+        h.x # E
+    @dec
+    def h(x: int) -> None:
+        g(1)
+        g.x # E
+[out]
+main:7: error: Callable[..., Any] has no attribute "x"
+main:11: error: Callable[..., Any] has no attribute "x"
+
+[case testNestedGenericFunctions]
+from typing import TypeVar
+T = TypeVar('T')
+U = TypeVar('U')
+
+def outer(x: T) -> T:
+    def inner(y: U) -> T: ...
+    return inner(1)
+
+
+-- Casts
+-- -----
+
+
+[case testCastsToAndFromFunctionTypes]
+from typing import TypeVar, Callable, Any, cast
+t = TypeVar('t')
+def f(x: t,
+      f1: Callable[[], None],
+      f2: Callable[[Any], None], o: object) -> None:
+    x = cast(t, f1)
+    f1 = cast(Callable[[], None], x)
+    f1 = cast(Callable[[], None], f2)
+    f1 = cast(Callable[[], None], o)
+
+
+-- Function decorators
+-- -------------------
+
+
+[case testTrivialStaticallyTypedFunctionDecorator]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+ at dec
+def f(x: int) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testTrivialStaticallyTypedMethodDecorator]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+class A:
+    @dec
+    def f(self, x: int) -> None: pass
+A().f(1)
+A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+class B: pass
+
+[case testTrivialDecoratedNestedFunction]
+from typing import TypeVar
+t = TypeVar('t')
+def dec(f: t) -> t:
+    return f
+def g() -> None:
+    @dec
+    def f(x: int) -> None: pass
+    f(1)
+    f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[out]
+
+[case testCheckingDecoratedFunction]
+import typing
+def dec(f): pass
+ at dec
+def f(x: 'A') -> None:
+    a = x # type: A
+    x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+[out]
+
+[case testDecoratorThatSwitchesType]
+from typing import Callable
+def dec(x) -> Callable[[], None]: pass
+ at dec
+def f(y): pass
+f()
+f(None) # E: Too many arguments for "f"
+
+[case testDecoratorThatSwitchesTypeWithMethod]
+from typing import Any, Callable
+def dec(x) -> Callable[[Any], None]: pass
+class A:
+    @dec
+    def f(self, a, b, c): pass
+a = None # type: A
+a.f()
+a.f(None) # E: Too many arguments for "f" of "A"
+
+[case testNestedDecorators]
+from typing import Any, Callable
+def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+ at dec1
+ at dec2
+def f(x, y): pass
+f()
+f(None) # E: Too many arguments for "f"
+
+[case testInvalidDecorator1]
+from typing import Any, Callable
+def dec1(f: Callable[[Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+ at dec1 # E: Argument 1 to "dec2" has incompatible type Callable[[Any], Any]; expected Callable[[Any, Any], None]
+ at dec2
+def f(x): pass
+
+[case testInvalidDecorator2]
+from typing import Any, Callable
+def dec1(f: Callable[[Any, Any], None]) -> Callable[[], None]: pass
+def dec2(f: Callable[[Any, Any], None]) -> Callable[[Any], None]: pass
+ at dec1 # E: Argument 1 to "dec1" has incompatible type Callable[[Any], None]; expected Callable[[Any, Any], None]
+ at dec2
+def f(x, y): pass
+
+[case testNoTypeCheckDecoratorOnMethod1]
+from typing import no_type_check
+
+ at no_type_check
+def foo(x: 'bar', y: {'x': 4}) -> 42:
+    1 + 'x'
+
+[case testNoTypeCheckDecoratorOnMethod2]
+import typing
+
+ at typing.no_type_check
+def foo(x: 's', y: {'x': 4}) -> 42:
+    1 + 'x'
+
+ at typing.no_type_check
+def bar() -> None:
+    1 + 'x'
+
+[case testCallingNoTypeCheckFunction]
+import typing
+
+ at typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    1 + 'x'
+
+foo()
+foo(1, 'b')
+
+[case testCallingNoTypeCheckFunction2]
+import typing
+
+def f() -> None:
+    foo()
+
+ at typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    1 + 'x'
+
+[case testNoTypeCheckDecoratorSemanticError]
+import typing
+
+ at typing.no_type_check
+def foo(x: {1:2}) -> [1]:
+    x = y
+
+
+-- Forward references to decorated functions
+-- -----------------------------------------
+
+
+[case testForwardReferenceToDynamicallyTypedDecorator]
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f):
+    return f
+
+ at dec
+def g():
+    pass
+
+[case testForwardReferenceToDecoratorWithAnyReturn]
+from typing import Any
+
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f) -> Any:
+    return f
+
+ at dec
+def g():
+    pass
+
+[case testForwardReferenceToDecoratorWithIdentityMapping]
+from typing import TypeVar
+
+def f(self) -> None:
+    g()
+    g(1) # E: Too many arguments for "g"
+    h(1).x # E: "str" has no attribute "x"
+    h('') # E: Argument 1 to "h" has incompatible type "str"; expected "int"
+
+T = TypeVar('T')
+def dec(f: T) -> T:
+    return f
+
+ at dec
+def g(): pass
+ at dec
+def h(x: int) -> str: pass
+[out]
+
+[case testForwardReferenceToDynamicallyTypedDecoratedMethod]
+def f(self) -> None:
+    A().f(1).y
+    A().f()
+
+class A:
+    @dec
+    def f(self, x): pass
+
+def dec(f): return f
+[builtins fixtures/staticmethod.pyi]
+
+[case testForwardReferenceToStaticallyTypedDecoratedMethod]
+from typing import TypeVar
+
+def f(self) -> None:
+    A().f(1).y # E: "str" has no attribute "y"
+    A().f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @dec
+    def f(self, a: int) -> str: return ''
+
+T = TypeVar('T')
+def dec(f: T) -> T: return f
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedProperty]
+def f(self) -> None:
+    A().x.y
+
+class A:
+    @property
+    def x(self): pass
+[builtins fixtures/property.pyi]
+
+[case testForwardReferenceToStaticallyTypedProperty]
+def f(self) -> None:
+    A().x.y # E: "int" has no attribute "y"
+
+class A:
+    @property
+    def x(self) -> int: return 1
+[builtins fixtures/property.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedStaticMethod]
+def f(self) -> None:
+    A.x(1).y
+    A.x() # E: Too few arguments for "x"
+
+class A:
+    @staticmethod
+    def x(x): pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToStaticallyTypedStaticMethod]
+def f(self) -> None:
+    A.x(1).y # E: "str" has no attribute "y"
+    A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @staticmethod
+    def x(a: int) -> str: return ''
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testForwardReferenceToDynamicallyTypedClassMethod]
+def f(self) -> None:
+    A.x(1).y
+    A.x() # E: Too few arguments for "x"
+
+class A:
+    @classmethod
+    def x(cls, a): pass
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testForwardReferenceToStaticallyTypedClassMethod]
+def f(self) -> None:
+    A.x(1).y # E: "str" has no attribute "y"
+    A.x('') # E: Argument 1 to "x" of "A" has incompatible type "str"; expected "int"
+
+class A:
+    @classmethod
+    def x(cls, x: int) -> str: return ''
+[builtins fixtures/classmethod.pyi]
+[out]
+
+[case testForwardReferenceToDecoratedFunctionUsingMemberExpr]
+import m
+
+def f(self) -> None:
+    g(1).x # E: "str" has no attribute "x"
+
+ at m.dec
+def g(x: int) -> str: pass
+[file m.py]
+from typing import TypeVar
+T = TypeVar('T')
+def dec(f: T) -> T:
+    return f
+[out]
+
+[case testForwardReferenceToFunctionWithMultipleDecorators]
+def f(self) -> None:
+    g()
+    g(1)
+
+def dec(f):
+    return f
+
+ at dec
+ at dec2
+def g():
+    pass
+
+def dec2(f):
+    return f
+
+[case testForwardReferenceToDynamicallyTypedDecoratedStaticMethod]
+def f(self) -> None:
+    A().f(1).y
+    A().f()
+    A().g(1).y
+    A().g()
+
+class A:
+    @dec
+    @staticmethod
+    def f(self, x): pass
+    @staticmethod
+    @dec
+    def g(self, x): pass
+
+def dec(f): return f
+[builtins fixtures/staticmethod.pyi]
+
+[case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator]
+def f(self) -> None:
+    g()
+    g(1)
+
+ at dec(1)
+def g(): pass
+
+def dec(f): pass
+
+
+-- Decorator functions in import cycles
+-- ------------------------------------
+
+
+[case testDecoratorWithIdentityTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+ at dec
+def f(x: int) -> None: pass
+b.g(1) # E
+
+[file b.py]
+import a
+from d import dec
+ at dec
+def g(x: str) -> None: pass
+a.f('')
+
+[file d.py]
+from typing import TypeVar
+T = TypeVar('T')
+def dec(f: T) -> T: return f
+
+[out]
+tmp/b.py:5: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+tmp/a.py:5: error: Argument 1 to "g" has incompatible type "int"; expected "str"
+
+[case testDecoratorWithNoAnnotationInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+ at dec
+def f(x: int) -> None: pass
+b.g(1, z=4)
+
+[file b.py]
+import a
+from d import dec
+ at dec
+def g(x: str) -> None: pass
+a.f('', y=2)
+
+[file d.py]
+def dec(f): return f
+
+[case testDecoratorWithFixedReturnTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+ at dec
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+ at dec
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec(f: Callable[[int], str]) -> Callable[[int], str]: return f
+
+[out]
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:5: error: "str" not callable
+
+[case testDecoratorWithCallAndFixedReturnTypeInImportCycle]
+import a
+
+[file a.py]
+import b
+from d import dec
+ at dec()
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+ at dec()
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec() -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
+
+[out]
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:5: error: "str" not callable
+
+[case testDecoratorWithCallAndFixedReturnTypeInImportCycleAndDecoratorArgs]
+import a
+
+[file a.py]
+import b
+from d import dec
+ at dec(1)
+def f(x: int) -> str: pass
+b.g(1)()
+
+[file b.py]
+import a
+from d import dec
+ at dec(1)
+def g(x: int) -> str: pass
+a.f(1)()
+
+[file d.py]
+from typing import Callable
+def dec(x: str) -> Callable[[Callable[[int], str]], Callable[[int], str]]: pass
+
+[out]
+tmp/b.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
+tmp/b.py:5: error: "str" not callable
+tmp/a.py:3: error: Argument 1 to "dec" has incompatible type "int"; expected "str"
+tmp/a.py:5: error: "str" not callable
+
+[case testUndefinedDecoratorInImportCycle]
+# cmd: mypy -m foo.base
+[file foo/__init__.py]
+import foo.base
+class Derived(foo.base.Base):
+    def method(self) -> None: pass
+[file foo/base.py]
+import foo
+class Base:
+    @decorator
+    def method(self) -> None: pass
+[out]
+tmp/foo/base.py:3: error: Name 'decorator' is not defined
+
+
+-- Conditional function definition
+-- -------------------------------
+
+
+[case testTypeCheckBodyOfConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:
+        x = 1
+        x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testCallConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:  pass
+    f(1)
+    f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testConditionalFunctionDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None:
+        x = 'x'   # fail
+        x = 1
+else:
+    def f(x: int) -> None:
+        x + 'x'   # fail
+        x = 1
+f(1)
+f('x') # fail
+[out]
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:9: error: Unsupported operand types for + ("int" and "str")
+main:12: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testNestedConditionalFunctionDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+def top() -> None:
+    if x:
+        def f(x: int) -> None:
+            x = 'x'   # fail
+            x = 1
+    else:
+        def f(x: int) -> None:
+            x + 'x'   # fail
+            x = 1
+    f(1)
+    f('x') # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:10: error: Unsupported operand types for + ("int" and "str")
+main:13: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testUnconditionalRedefinitionOfConditionalFunction]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(): pass
+def f(): pass # E: Name 'f' already defined
+
+[case testIncompatibleConditionalFunctionDefinition]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x): pass # E: All conditional function variants must have identical signatures
+
+[case testIncompatibleConditionalFunctionDefinition2]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testIncompatibleConditionalFunctionDefinition3]
+from typing import Any
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
+from typing import Any
+def f(x: str) -> None: pass
+x = None # type: Any
+if x:
+    def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures
+
+[case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1]
+from typing import Any
+def f(x: int) -> None: pass # N: "f" defined here
+x = None # type: Any
+if x:
+    def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures
+f(x=1) # The first definition takes precedence.
+f(y=1) # E: Unexpected keyword argument "y" for "f"
+
+[case testRedefineFunctionDefinedAsVariable]
+def g(): pass
+f = g
+if g():
+    def f(): pass
+f()
+f(1) # E: Too many arguments
+
+[case testRedefineFunctionDefinedAsVariableInitializedToNone]
+def g(): pass
+f = None
+if g():
+    def f(): pass
+f()
+f(1) # E: Too many arguments for "f"
+
+[case testRedefineNestedFunctionDefinedAsVariableInitializedToNone]
+def g() -> None:
+    f = None
+    if object():
+        def f(x: int) -> None: pass
+    f() # E: Too few arguments for "f"
+    f(1)
+    f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[out]
+
+[case testRedefineFunctionDefinedAsVariableWithInvalidSignature]
+def g(): pass
+f = g
+if g():
+    def f(x): pass  # E: Incompatible redefinition (redefinition with type Callable[[Any], Any], original type Callable[[], Any])
+
+[case testRedefineFunctionDefinedAsVariableWithVariance1]
+class B: pass
+class C(B): pass
+def g(x: C) -> B: pass
+f = g
+if g(C()):
+    def f(x: C) -> C: pass
+
+[case testRedefineFunctionDefinedAsVariableWithVariance2]
+class B: pass
+class C(B): pass
+def g(x: C) -> B: pass
+f = g
+if g(C()):
+    def f(x: B) -> B: pass
+
+[case testRedefineFunctionDefinedAsVariableInitializedToEmptyList]
+f = [] # E: Need type annotation for variable
+if object():
+    def f(): pass # E: Incompatible redefinition
+f()
+f(1)
+[builtins fixtures/list.pyi]
+
+
+-- Conditional method definition
+-- -----------------------------
+
+
+[case testTypeCheckBodyOfConditionalMethod]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:
+            x = 1
+            x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testCallConditionalMethodInClassBody]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:  pass
+        f(x, 1)
+        f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
+    f(x, 1)
+    f(x, 'x') # E: Argument 2 to "f" of "A" has incompatible type "str"; expected "int"
+[out]
+
+[case testCallConditionalMethodViaInstance]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+         def f(self, x: int) -> None: pass
+A().f(1)
+A().f('x') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testConditionalMethodDefinitionWithIfElse]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None:
+            x = 'x'   # fail
+            x = 1
+    else:
+        def f(self, x: int) -> None:
+            x + 'x'   # fail
+            x = 1
+A().f(1)
+A().f('x') # fail
+[out]
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:10: error: Unsupported operand types for + ("int" and "str")
+main:13: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testUnconditionalRedefinitionOfConditionalMethod]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self): pass
+    def f(self): pass # E: Name 'f' already defined
+
+[case testIncompatibleConditionalMethodDefinition]
+from typing import Any
+x = None # type: Any
+class A:
+    if x:
+        def f(self, x: int) -> None: pass
+    else:
+        def f(self, x): pass # E: All conditional function variants must have identical signatures
+[out]
+
+[case testConditionalFunctionDefinitionInTry]
+import typing
+try:
+    def f(x: int) -> None: pass
+except:
+    def g(x: str) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+g('x')
+g(1) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
+
+
+-- Callable[..., T]
+-- ----------------
+
+
+[case testCallableWithArbitraryArgs]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x()
+    x(1)
+    x(z=1)
+    x() + '' # E: Unsupported operand types for + ("int" and "str")
+[out]
+
+[case testCallableWithArbitraryArgs2]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x(*[1], **{'x': 2})
+[builtins fixtures/dict.pyi]
+
+[case testCastWithCallableAndArbitraryArgs]
+from typing import Callable, cast
+f = cast(Callable[..., int], None)
+f(x=4) + '' # E: Unsupported operand types for + ("int" and "str")
+
+[case testCallableWithArbitraryArgsInErrorMessage]
+from typing import Callable
+def f(x: Callable[..., int]) -> None:
+    x = 1  # E: Incompatible types in assignment (expression has type "int", variable has type Callable[..., int])
+[out]
+
+[case testCallableWithArbitraryArgsInGenericFunction]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+def f(x: Callable[..., T]) -> T: pass
+def g(*x: int) -> str: pass
+x = f(g)
+x + 1 # E: Unsupported left operand type for + ("str")
+[builtins fixtures/list.pyi]
+
+[case testCallableWithArbitraryArgsSubtyping]
+from typing import Callable
+def f(x: Callable[..., int]) -> None: pass
+def g1(): pass
+def g2(x, y) -> int: pass
+def g3(*, y: str) -> int: pass
+def g4(*, y: int) -> str: pass
+f(g1)
+f(g2)
+f(g3)
+f(g4) # E: Argument 1 to "f" has incompatible type Callable[[NamedArg('y', int)], str]; expected Callable[..., int]
+
+[case testCallableWithArbitraryArgsSubtypingWithGenericFunc]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+def f(x: Callable[..., int]) -> None: pass
+def g1(x: T) -> int: pass
+def g2(*x: T) -> int: pass
+def g3(*x: T) -> T: pass
+f(g1)
+f(g2)
+f(g3)
+
+-- (...) -> T
+-- ----------------
+[case testEllipsisWithArbitraryArgsOnBareFunction]
+def f(x, y, z): # type: (...) -> None
+    pass
+f(1, "hello", [])
+f(x=1, y="hello", z=[])
+[builtins fixtures/dict.pyi]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithDefaults]
+def f(x, y=1, z="hey"): # type: (...) -> None
+    pass
+f(1, "hello", [])
+f(x=1, y="hello", z=[])
+[builtins fixtures/dict.pyi]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithKwargs]
+from typing import Dict
+def f(x, **kwargs): # type: (...) -> None
+    success_dict_type = kwargs # type: Dict[str, str]
+    failure_dict_type = kwargs # type: Dict[int, str] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[int, str])
+f(1, thing_in_kwargs=["hey"])
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testEllipsisWithArbitraryArgsOnBareFunctionWithVarargs]
+from typing import Tuple, Any
+def f(x, *args): # type: (...) -> None
+    success_tuple_type = args # type: Tuple[Any, ...]
+    fail_tuple_type = args # type: None # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type None)
+f(1, "hello")
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testEllipsisWithArbitraryArgsOnInstanceMethod]
+class A:
+    def f(self, x, y, z): # type: (...) -> None
+        pass
+
+[case testEllipsisWithArbitraryArgsOnClassMethod]
+class A:
+    @classmethod
+    def f(cls, x, y, z): # type: (...) -> None
+        pass
+[builtins fixtures/classmethod.pyi]
+
+[case testEllipsisWithArbitraryArgsOnStaticMethod]
+class A:
+    @staticmethod
+    def f(x, y, z): # type: (...) -> None
+        pass
+[builtins fixtures/staticmethod.pyi]
+
+[case testEllipsisWithSomethingAfterItFails]
+def f(x, y, z): # type: (..., int) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testEllipsisWithSomethingBeforeItFails]
+def f(x, y, z): # type: (int, ...) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testRejectCovariantArgument]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', covariant=True)
+class A(Generic[t]):
+    def foo(self, x: t) -> None:
+        return None
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot use a covariant type variable as a parameter
+
+[case testRejectContravariantReturnType]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', contravariant=True)
+class A(Generic[t]):
+    def foo(self) -> t:
+        return None
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot use a contravariant type variable as return type
+
+[case testAcceptCovariantReturnType]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', covariant=True)
+class A(Generic[t]):
+    def foo(self) -> t:
+        return None
+[builtins fixtures/bool.pyi]
+[case testAcceptContravariantArgument]
+from typing import TypeVar, Generic
+
+t = TypeVar('t', contravariant=True)
+class A(Generic[t]):
+    def foo(self, x: t) -> None:
+        return None
+[builtins fixtures/bool.pyi]
+
+
+-- Redefining functions
+-- --------------------
+
+
+[case testRedefineFunction]
+def f(x) -> Any: pass
+def g(x, y): pass
+def h(x): pass
+def j(y) -> Any: pass
+f = h
+f = j # E: Incompatible types in assignment (expression has type Callable[[Arg('y', Any)], Any], variable has type Callable[[Arg('x', Any)], Any])
+f = g # E: Incompatible types in assignment (expression has type Callable[[Any, Any], Any], variable has type Callable[[Any], Any])
+
+[case testRedefineFunction2]
+def f() -> None: pass
+def f() -> None: pass # E: Name 'f' already defined
+
+
+-- Special cases
+-- -------------
+
+
+[case testFunctionDefinitionWithForStatement]
+for _ in [1]:
+    def f(): pass
+else:
+    def g(): pass
+f()
+g()
+[builtins fixtures/list.pyi]
+
+[case testFunctionDefinitionWithWhileStatement]
+while bool():
+    def f(): pass
+else:
+    def g(): pass
+f()
+g()
+[builtins fixtures/bool.pyi]
+
+[case testBareCallable]
+from typing import Callable, Any
+
+def foo(f: Callable) -> bool:
+    return f()
+
+def f1() -> bool:
+    return False
+
+foo(f1)
+[builtins fixtures/bool.pyi]
+
+[case testFunctionNestedWithinWith]
+from typing import Any
+a = 1  # type: Any
+with a:
+    def f() -> None:
+        pass
+    f(1) # E: Too many arguments for "f"
+
+
+[case testNameForDecoratorMethod]
+from typing import Callable
+
+class A:
+    def f(self) -> None:
+        # In particular, test that the error message contains "g" of "A".
+        self.g() # E: Too few arguments for "g" of "A"
+        self.g(1)
+    @dec
+    def g(self, x: str) -> None: pass
+
+def dec(f: Callable[[A, str], None]) -> Callable[[A, int], None]: pass
+[out]
+
+[case testUnknownFunctionNotCallable]
+def f() -> None:
+    pass
+def g(x: int) -> None:
+    pass
+h = f if bool() else g
+reveal_type(h) # E: Revealed type is 'builtins.function'
+h(7) # E: Cannot call function of unknown type
+[builtins fixtures/bool.pyi]
+
+-- Positional-only arguments
+-- -------------------------
+
+[case testPositionalOnlyArg]
+def f(__a: int) -> None: pass
+
+f(1)
+f(__a=1) # E: Unexpected keyword argument "__a" for "f"
+
+[builtins fixtures/bool.pyi]
+[out]
+main:1: note: "f" defined here
+
+[case testPositionalOnlyArgFastparse]
+# flags: --fast-parser
+
+def f(__a: int) -> None: pass
+
+f(1)
+f(__a=1) # E: Unexpected keyword argument "__a" for "f"
+
+[builtins fixtures/bool.pyi]
+[out]
+main:3: note: "f" defined here
+
+[case testMagicMethodPositionalOnlyArg]
+class A(object):
+    def __eq__(self, other) -> bool: return True # We are all equal.
+
+a = A()
+a.__eq__(a)
+a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
+
+[builtins fixtures/bool.pyi]
+
+[case testMagicMethodPositionalOnlyArgFastparse]
+# flags: --fast-parser
+
+class A(object):
+    def __eq__(self, other) -> bool: return True # We are all equal.
+
+a = A()
+a.__eq__(a)
+a.__eq__(other=a) # E: Unexpected keyword argument "other" for "__eq__" of "A"
+
+[builtins fixtures/bool.pyi]
+
+[case testTupleArguments]
+# flags: --python-version 2.7
+
+def f(a, (b, c), d): pass
+
+[case testTupleArgumentsFastparse]
+# flags: --fast-parser --python-version 2.7
+
+def f(a, (b, c), d): pass
diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test
new file mode 100644
index 0000000..35cd0f9
--- /dev/null
+++ b/test-data/unit/check-generic-subtyping.test
@@ -0,0 +1,746 @@
+-- Test cases for the type checker related to subtyping and inheritance with
+-- generics.
+
+
+-- Subtyping + inheritance
+-- -----------------------
+
+
+[case testSubtypingAndInheritingNonGenericTypeFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ac = None # type: A[C]
+ad = None # type: A[D]
+b = None # type: B
+
+b = ad # E: Incompatible types in assignment (expression has type A[D], variable has type "B")
+ad = b # E: Incompatible types in assignment (expression has type "B", variable has type A[D])
+b = ac # E: Incompatible types in assignment (expression has type A[C], variable has type "B")
+
+b = b
+ac = b
+
+class C: pass
+class A(Generic[T]): pass
+class B(A[C]): pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromNonGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+bc = None # type: B[C]
+bd = None # type: B[D]
+
+bc = bd # E: Incompatible types in assignment (expression has type B[D], variable has type B[C])
+bd = bc # E: Incompatible types in assignment (expression has type B[C], variable has type B[D])
+bc = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[C])
+bd = a  # E: Incompatible types in assignment (expression has type "A", variable has type B[D])
+
+a = bc
+a = bd
+
+class A: pass
+class B(A, Generic[T]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+ac = None # type: A[C]
+ad = None # type: A[D]
+bcc = None # type: B[C, C]
+bdc = None # type: B[D, C]
+
+ad = bcc # E: Incompatible types in assignment (expression has type B[C, C], variable has type A[D])
+ad = bdc # E: Incompatible types in assignment (expression has type B[D, C], variable has type A[D])
+bcc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[C, C])
+bdc = ac # E: Incompatible types in assignment (expression has type A[C], variable has type B[D, C])
+
+bcc = bcc
+bdc = bdc
+ac = bcc
+ac = bdc
+
+class A(Generic[T]): pass
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingAndInheritingGenericTypeFromGenericTypeAcrossHierarchy]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+X = TypeVar('X')
+Y = TypeVar('Y')
+ae = None # type: A[A[E]]
+af = None # type: A[A[F]]
+
+cef = None # type: C[E, F]
+cff = None # type: C[F, F]
+cfe = None # type: C[F, E]
+
+ae = cef # E: Incompatible types in assignment (expression has type C[E, F], variable has type A[A[E]])
+af = cfe # E: Incompatible types in assignment (expression has type C[F, E], variable has type A[A[F]])
+
+ae = cfe
+af = cef
+af = cff
+
+class A(Generic[T]): pass
+class B(A[S], Generic[T, S]): pass
+class C(B[A[X], A[Y]], Generic[X, Y]): pass
+class E: pass
+class F: pass
+
+[case testIncludingBaseClassTwice]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class I(Generic[t]): pass
+class A(I[C], I[object]): pass # E: Duplicate base class "I"
+class C: pass
+
+
+-- Accessing inherited generic members
+-- -----------------------------------
+
+
+[case testAccessingMethodInheritedFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+b = None # type: B[C, D]
+c, d = None, None # type: (C, D)
+
+b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
+b.f(d)
+
+class A(Generic[T]):
+    def f(self, a: T) -> None:
+        pass
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testAccessingMethodInheritedFromGenericTypeInNonGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b, c, d = None, None, None # type: (B, C, D)
+
+b.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "D"
+b.f(d)
+
+class C: pass
+class D: pass
+class A(Generic[T]):
+    def f(self, a: T) -> None:
+        pass
+class B(A[D]): pass
+
+[case testAccessingMemberVarInheritedFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None:
+        self.a = a
+
+b = None # type: B[C, D]
+c, d = None, None # type: (C, D)
+
+b.a = c # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+b.a = d
+
+class B(A[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+
+-- Overriding with generic types
+-- -----------------------------
+
+
+[case testOverridingMethodInSimpleTypeInheritingGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+    def g(self, a: T) -> None: pass
+class C: pass
+class D: pass
+class A(B[C]):
+    def f(self, a: D) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: C) -> None: pass
+[out]
+
+[case testOverridingMethodInGenericTypeInheritingSimpleType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class B:
+    def f(self, a: C) -> None: pass
+    def g(self, a: C) -> None: pass
+class A(B, Generic[T]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: 'C') -> None: pass
+[out]
+
+[case testOverridingMethodInGenericTypeInheritingGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+    def g(self, a: T) -> None: pass
+class A(B[S], Generic[T, S]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "B"
+    def g(self, a: S) -> None: pass
+[out]
+
+[case testOverridingMethodInMultilevelHierarchyOfGenericTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+V = TypeVar('V')
+
+class D: pass
+class C(Generic[T, U, V]):
+    def f(self, a: V) -> None: pass
+    def g(self, a: V) -> None: pass
+class B(C[D, D, T], Generic[T]): pass
+class A(B[S], Generic[T, S]):
+    def f(self, a: T) -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "C"
+    def g(self, a: S) -> None: pass
+[out]
+
+[case testOverrideGenericMethodInNonGenericClass]
+from typing import TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A:
+    def f(self, x: T, y: S) -> None: pass
+class B(A):
+    def f(self, x: S, y: T) -> None: pass
+class C(A):
+    # Okay, because T = object allows any type for the arguments.
+    def f(self, x: T, y: T) -> None: pass
+
+[case testOverrideGenericMethodInNonGenericClassLists]
+from typing import TypeVar, List
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A:
+    def f(self, x: List[T], y: List[S]) -> None: pass
+class B(A):
+    def f(self, x: List[S], y: List[T]) -> None: pass
+class C(A):
+    def f(self, x: List[T], y: List[T]) -> None: pass # E: Signature of "f" incompatible with supertype "A"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testOverrideGenericMethodInNonGenericClassGeneralize]
+from typing import TypeVar
+
+T = TypeVar('T')
+T1 = TypeVar('T1', bound=str)
+S = TypeVar('S')
+
+class A:
+    def f(self, x: int, y: S) -> None: pass
+class B(A):
+    def f(self, x: T, y: S) -> None: pass
+class C(A):
+    def f(self, x: T, y: str) -> None: pass
+class D(A):
+    def f(self, x: T1, y: S) -> None: pass # TODO: This error could be more specific.
+[out]
+main:12: error: Argument 2 of "f" incompatible with supertype "A"
+main:14: error: Signature of "f" incompatible with supertype "A"
+
+
+-- Inheritance from generic types with implicit dynamic supertype
+-- --------------------------------------------------------------
+
+
+[case testInheritanceFromGenericWithImplicitDynamicAndSubtyping]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+bc = None # type: B[C]
+bd = None # type: B[D]
+
+a = bc # E: Incompatible types in assignment (expression has type B[C], variable has type "A")
+bc = a
+bd = a
+
+class B(Generic[T]): pass
+class A(B): pass
+class C: pass
+class D: pass
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamicAndExternalAccess]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+
+class B(Generic[T]):
+    def f(self, a: 'B[T]') -> None: pass
+    def __init__(self, x: 'B[T]') -> None:
+        self.x = x
+class A(B): pass
+class C: pass
+
+a = None # type: A
+c = None # type: C
+bc = None # type: B[C]
+
+a.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
+a.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+a.x = bc
+a.f(bc)
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamic]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A
+c = None # type: C
+bc = None # type: B[C]
+
+class B(Generic[T]):
+  def f(self, a: 'B[T]') -> None: pass
+  def __init__(self, x: 'B[T]') -> None:
+    self.x = x
+
+class A(B):
+  def g(self) -> None:
+    self.x = c # E: Incompatible types in assignment (expression has type "C", variable has type B[Any])
+    self.f(c)  # E: Argument 1 to "f" of "B" has incompatible type "C"; expected B[Any]
+    self.x = bc
+    self.f(bc)
+
+class C: pass
+[out]
+
+[case testInheritanceFromGenericWithImplicitDynamicAndOverriding]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+class B(Generic[T]):
+    def f(self, a: T, b: 'Tuple[T, B[T]]') -> None:
+        pass
+class A(B):
+    def f(self, a, b): pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+-- Inheritance from generic types and super expressions
+-- ----------------------------------------------------
+
+
+[case testSuperExpressionsWhenInheritingFromGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class B(Generic[T]):
+    def f(self, a: T) -> None: pass
+class A(B[S], Generic[T, S]):
+    def g(self, t: T, s: S) -> None:
+        super().f(t)   # E: Argument 1 to "f" of "B" has incompatible type "T"; expected "S"
+        super().f(s)
+[out]
+
+[case testSuperExpressionsWhenInheritingFromGenericTypeAndDeepHierarchy]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+V = TypeVar('V')
+class C(Generic[T, U, V]):
+    def f(self, a: V) -> None: pass
+class D: pass
+class B(C[D, D, T], Generic[T]): pass
+class A(B[S], Generic[T, S]):
+    def g(self, t: T, s: S) -> None:
+        super().f(t)   # E: Argument 1 to "f" of "C" has incompatible type "T"; expected "S"
+        super().f(s)
+[out]
+
+
+-- Type of inherited constructor
+-- -----------------------------
+
+
+[case testInheritedConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+class B(A[T], Generic[T]): pass
+class C(A[int]): pass
+class D(A[A[T]], Generic[T]): pass
+B(1)
+C(1)
+C('a')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+D(A(1))
+D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected A[None]
+
+
+[case testInheritedConstructor2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+Z = TypeVar('Z')
+class A(Generic[T, U]):
+    def __init__(self, x: T, y: U, z: Z) -> None: pass
+class B(A[int, T], Generic[T]): pass
+class C(B[A[T, str]], Generic[T, U]): pass
+# C[T, U] <: B[A[T, str]] <: A[int, A[T, str]]
+C(1, A(1, 'a', 0), 'z')
+C(1, A('1', 'a', 0), 'z')
+C('1', A(1, 'a', 0), 'z')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
+C(1, A(1, 1, 0), 'z')  # E: Argument 2 to "A" has incompatible type "int"; expected "str"
+
+
+-- Subtyping with a generic abstract base class
+-- --------------------------------------------
+
+
+[case testSubtypingWithGenericTypeSubclassingGenericAbstractClass]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+S = TypeVar('S')
+acd = None # type: A[C, D]
+adc = None # type: A[D, C]
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = acd # E: Incompatible types in assignment (expression has type A[C, D], variable has type I[C])
+id = adc # E: Incompatible types in assignment (expression has type A[D, C], variable has type I[D])
+adc = ic # E: Incompatible types in assignment (expression has type I[C], variable has type A[D, C])
+
+ic = adc
+id = acd
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self): pass
+class A(I[S], Generic[T, S]): pass
+class C: pass
+class D: pass
+
+[case testSubtypingWithTypeImplementingGenericABCViaInheritance]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+ic, id, ie = None, None, None # type: (I[C], I[D], I[E])
+
+class I(Generic[S]): pass
+class B(I[C]): pass
+class A(B): pass
+
+ie = a # E: Incompatible types in assignment (expression has type "A", variable has type I[E])
+a = ic # E: Incompatible types in assignment (expression has type I[C], variable has type "A")
+a = id # E: Incompatible types in assignment (expression has type I[D], variable has type "A")
+a = b  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+id = a # E: Incompatible types in assignment (expression has type "A", variable has type I[D])
+
+ic = a
+b = a
+
+class C: pass
+class D: pass
+class E: pass
+[out]
+
+[case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class I(Generic[T]): pass
+class A(I[C]): pass
+class B(A, I[D]): pass # Fail
+
+class C: pass
+class D: pass
+[out]
+main:5: error: Class "B" has base "I" duplicated inconsistently
+
+[case testSubtypingAndABCExtension]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+a, i, j = None, None, None # type: (A[object], I[object], J[object])
+(ii, jj) = (i, j)
+ii = a
+jj = a
+jj = i
+a = i # E: Incompatible types in assignment (expression has type I[object], variable has type A[object])
+a = j # E: Incompatible types in assignment (expression has type J[object], variable has type A[object])
+
+class J(Generic[t]): pass
+class X(metaclass=ABCMeta): pass
+class I(X, J[t], Generic[t]): pass
+class A(I[t], Generic[t]): pass
+
+
+-- Subclassing a generic ABC
+-- -------------------------
+
+
+[case testSubclassingGenericABC1]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+    @abstractmethod
+    def g(self, a: T) -> None: pass
+class A(I[C]):
+    def f(self, a: 'D') -> None: pass \
+        # E: Argument 1 of "f" incompatible with supertype "I"
+    def g(self, a: 'C') -> None: pass
+class C: pass
+class D: pass
+[out]
+
+
+-- Extending a generic ABC with deep type hierarchy
+-- ------------------------------------------------
+
+
+[case testSubclassingGenericABCWithDeepHierarchy]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: A
+ic, id = None, None # type: (I[C], I[D])
+
+id = a # Fail
+ic = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+    @abstractmethod
+    def g(self, a: T, b: 'D') -> None: pass
+class B(I[C]):
+    def f(self, a: 'C', b: 'C') -> None: pass
+    def g(self, a: 'C', b: Any) -> None: pass
+class A(B):
+    def g(self, a: 'C', b: 'C') -> None: pass \
+        # E: Argument 2 of "g" incompatible with supertype "I"
+    def f(self, a: 'C', b: 'C') -> None: pass
+class C: pass
+class D: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "A", variable has type I[D])
+
+[case testSubclassingGenericABCWithDeepHierarchy2]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class B(I[C]):
+    def f(self, a: 'C', b: Any) -> None: pass
+class A(B):
+    def f(self, a: 'C', b: 'D') -> None: pass \
+        # E: Argument 2 of "f" incompatible with supertype "I"
+class C: pass
+class D: pass
+[out]
+
+
+-- Implicit Any types and subclassing generic ABC
+-- ----------------------------------------------
+
+
+[case testSubclassingGenericABCWithImplicitAny]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = a
+id = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class A(I):
+    def f(self, a): pass
+
+class C: pass
+class D: pass
+
+[case testSubclassingGenericABCWithImplicitAnyAndDeepHierarchy]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+ic = None # type: I[C]
+id = None # type: I[D]
+
+ic = a
+id = a
+
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class B(I):
+    def f(self, a, b): pass
+class A(B):
+    def f(self, a: 'C', b: 'D') -> None: pass
+class C: pass
+class D: pass
+
+[case testImplementingGenericABCWithImplicitAnyAndDeepHierarchy2]
+from typing import Any, TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+a = None # type: Any
+jc = None # type: J[C]
+jd = None # type: J[D]
+
+jc = a
+jd = a
+
+class J(Generic[T]):
+    @abstractmethod
+    def f(self, a: T, b: T) -> None: pass
+class I(J):
+    @abstractmethod
+    def f(self, a, b): pass
+class A(I):
+    def f(self, a: 'C', b: 'D') -> None: pass
+
+class C: pass
+class D: pass
+
+
+-- Accessing generic ABC members
+-- -----------------------------
+
+
+[case testAccessingGenericABCMembers]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class I(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class A: pass
+class B: pass
+
+a, b = None, None # type: (A, B)
+ia = None # type: I[A]
+
+ia.f(b)  # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A"
+ia.f(a)
+
+[case testAccessingInheritedGenericABCMembers]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T')
+class J(Generic[T]):
+    @abstractmethod
+    def f(self, a: T) -> None: pass
+class I(J[T], Generic[T]): pass
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+ia = None # type: I[A]
+
+ia.f(b)  # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A"
+ia.f(a)
+
+
+-- Misc
+-- ----
+
+
+[case testMultipleAssignmentAndGenericSubtyping]
+from typing import Iterable
+n, s = None, None # type: int, str
+class Nums(Iterable[int]):
+    def __iter__(self): pass
+    def __next__(self): pass
+n, n = Nums()
+s, s = Nums() # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Variance
+-- --------
+
+
+[case testCovariant]
+from typing import TypeVar, Generic
+T = TypeVar('T', covariant=True)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
+b = c
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testContravariant]
+from typing import TypeVar, Generic
+T = TypeVar('T', contravariant=True)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a
+b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testInvariant]
+from typing import TypeVar, Generic
+T = TypeVar('T')  # invariant (default)
+
+class G(Generic[T]): pass
+class A: pass
+class B(A): pass
+class C(B): pass
+
+a = None  # type: G[A]
+b = None  # type: G[B]
+c = None  # type: G[C]
+
+b = a  # E: Incompatible types in assignment (expression has type G[A], variable has type G[B])
+b = c  # E: Incompatible types in assignment (expression has type G[C], variable has type G[B])
+[builtins fixtures/bool.pyi]
+[out]
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
new file mode 100644
index 0000000..81eb74b
--- /dev/null
+++ b/test-data/unit/check-generics.test
@@ -0,0 +1,1462 @@
+-- Simple generic types
+-- --------------------
+
+
+[case testGenericMethodReturnType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a, b, c = None, None, None # type: (A[B], B, C)
+c = a.f() # Fail
+b = a.f()
+
+class A(Generic[T]):
+    def f(self) -> T: pass
+
+class B: pass
+class C: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+
+[case testGenericMethodArgument]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a.f(c) # Fail
+a.f(b)
+
+a = None # type: A[B]
+b = None # type: B
+c = None # type: C
+
+class A(Generic[T]):
+    def f(self, a: T) -> None: pass
+
+class B: pass
+class C: pass
+[out]
+main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
+
+[case testGenericMemberVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, v: T) -> None:
+        self.v = v
+
+a, b, c = None, None, None # type: (A[B], B, C)
+a.v = c # Fail
+a.v = b
+
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testGenericMemberVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a, b, c = None, None, None # type: (A[B], B, C)
+a.v = c # Fail
+a.v = b
+
+class A(Generic[T]):
+    v = None # type: T
+class B: pass
+class C: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testSimpleGenericSubtyping]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b, bb, c = None, None, None # type: (A[B], A[B], A[C])
+c = b # Fail
+b = c # Fail
+
+b = b
+b = bb
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+main:5: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
+
+[case testGenericTypeCompatibilityWithAny]
+from typing import Any, TypeVar, Generic
+T = TypeVar('T')
+b, c, d = None, None, None # type: (A[B], A[C], A[Any])
+
+b = d
+c = d
+d = b
+d = c
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+
+[case testTypeVariableAsTypeArgument]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+a = None # type: A[B]
+b = None # type: A[B]
+c = None # type: A[C]
+
+a.v = c # Fail
+c = a.v # Fail
+a.v = b
+b = a.v
+
+class A(Generic[T]):
+    v = None # type: A[T]
+
+class B: pass
+class C: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type A[C], variable has type A[B])
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+
+[case testMultipleGenericTypeParametersWithMemberVars]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+s = None # type: B
+t = None # type: C
+
+t = a.s # Fail
+s = a.t # Fail
+
+s = a.s
+t = a.t
+
+class A(Generic[S, T]):
+    s = None # type: S
+    t = None # type: T
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:9: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testMultipleGenericTypeParametersWithMethods]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+s = None # type: B
+t = None # type: C
+
+a.f(s, s) # Fail
+a.f(t, t) # Fail
+a.f(s, t)
+
+class A(Generic[S, T]):
+    def f(self, s: S, t: T) -> None: pass
+class B: pass
+class C: pass
+[out]
+main:8: error: Argument 2 to "f" of "A" has incompatible type "B"; expected "C"
+main:9: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B"
+
+[case testMultipleGenericTypeParametersAndSubtyping]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+bc = None # type: A[B, C]
+bb = None # type: A[B, B]
+cb = None # type: A[C, B]
+
+bb = bc # Fail
+bb = cb # Fail
+bc = bb # Fail
+
+bb = bb
+bc = bc
+
+class A(Generic[S, T]):
+    s = None # type: S
+    t = None # type: T
+
+class B: pass
+class C(B):pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B, C], variable has type A[B, B])
+main:9: error: Incompatible types in assignment (expression has type A[C, B], variable has type A[B, B])
+main:10: error: Incompatible types in assignment (expression has type A[B, B], variable has type A[B, C])
+
+
+-- Simple generic type bodies
+-- --------------------------
+
+
+[case testGenericTypeBody1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    a = None # type: T
+
+    def f(self, b: T) -> T:
+        self.f(x)     # Fail
+        d = self # type: A[B] # Fail
+        self.a = self.f(self.a)
+        return self.a
+        c = self # type: A[T]
+x = None # type: B
+class B: pass
+[out]
+main:7: error: Argument 1 to "f" of "A" has incompatible type "B"; expected "T"
+main:8: error: Incompatible types in assignment (expression has type A[T], variable has type A[B])
+
+[case testGenericTypeBodyWithMultipleVariables]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class A(Generic[S, T]):
+    def f(self) -> None:
+        s = None # type: S
+        t = None # type: T
+        s = t # Fail
+        t = s # Fail
+        a = self # type: A[S, B] # Fail
+        b = self # type: A[T, T] # Fail
+        c = self # type: A[S, T]
+        t = t
+
+class B: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "T", variable has type "S")
+main:9: error: Incompatible types in assignment (expression has type "S", variable has type "T")
+main:10: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[S, B])
+main:11: error: Incompatible types in assignment (expression has type A[S, T], variable has type A[T, T])
+
+[case testCompatibilityOfNoneWithTypeVar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def f(self) -> None:
+        a = None # type: T
+        a = None
+[out]
+
+[case testCompatibilityOfTypeVarWithObject]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def f(self) -> T:
+        a = object() # type: T  # Fail
+        a = object()    # Fail
+        b = self.f() # type: object
+        b = self.f()
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "T")
+main:6: error: Incompatible types in assignment (expression has type "object", variable has type "T")
+
+
+-- Operations with generic types
+-- -----------------------------
+
+
+[case testGenericOperations]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[B, C]
+b = None # type: B
+c = None # type: C
+
+b = a + b # Fail
+c = a + c # Fail
+c = a[c]  # Fail
+b = a[b]  # Fail
+
+c = a + b
+b = a[c]
+
+class A(Generic[S, T]):
+    def __add__(self, a: S) -> T: pass
+    def __getitem__(self, i: T) -> S: pass
+
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+main:9: error: Unsupported operand types for + ("A" and "C")
+main:10: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:11: error: Invalid index type "B" for "A"; expected type "C"
+
+[case testOperatorAssignmentWithIndexLvalue1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ac = None # type: A[C]
+
+ac[b] += b # Fail
+ac[c] += c # Fail
+ac[b] += c
+ac[b] = ac[b] + c
+
+class A(Generic[T]):
+    def __getitem__(self, i: 'B') -> T: pass
+    def __setitem__(self, i: 'B', v: T) -> None: pass
+
+class B: pass
+class C:
+    def __add__(self, o: 'C') -> 'C': pass
+[out]
+main:7: error: Unsupported operand types for + ("C" and "B")
+main:7: error: Incompatible types in assignment (expression has type "B", target has type "C")
+main:8: error: Invalid index type "C" for "A"; expected type "B"
+
+[case testOperatorAssignmentWithIndexLvalue2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ac = None # type: A[C]
+
+ac[b] += c        # Fail
+ac[c] += c        # Fail
+ac[b] = ac[b] + c # Fail
+
+class A(Generic[T]):
+    def __getitem__(self, i: 'B') -> T: pass
+    def __setitem__(self, i: 'C', v: T) -> None: pass
+
+class B: pass
+class C:
+    def __add__(self, o: 'C') -> 'C': pass
+[out]
+main:7: error: Invalid index type "B" for "A"; expected type "C"
+main:8: error: Invalid index type "C" for "A"; expected type "B"
+main:9: error: Invalid index type "B" for "A"; expected type "C"
+
+
+-- Nested generic types
+-- --------------------
+
+
+[case testNestedGenericTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+aab = None # type: A[A[B]]
+aac = None # type: A[A[C]]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ac = aab.x # Fail
+ac.y = aab # Fail
+
+ab = aab.x
+ac = aac.x
+ab.y = aab
+ac.y = aac
+
+class A(Generic[T]):
+    x = None # type: T
+    y = None # type: A[A[T]]
+
+class B:
+    pass
+class C:
+    pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+main:9: error: Incompatible types in assignment (expression has type A[A[B]], variable has type A[A[C]])
+
+
+-- Generic functions
+-- -----------------
+
+
+[case testTypeCheckingGenericFunctionBody]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class A: pass
+class p(Generic[T, S]):
+    def __init__(self, t: T, a: S) -> None: pass
+def f(s: S, t: T) -> p[T, A]:
+    a = t # type: S # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+    s = t           # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+    p_s_a = None  # type: p[S, A]
+    if s:
+        return p_s_a # E: Incompatible return value type (got p[S, A], expected p[T, A])
+    b = t # type: T
+    c = s # type: S
+    p_t_a = None  # type: p[T, A]
+    return p_t_a
+[out]
+
+[case testTypeCheckingGenericMethodBody]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class p(Generic[T, S]):
+    def __init__(self, t: T, a: S) -> None: pass
+class A(Generic[T]):
+    def f(self, s: S, t: T) -> p[S, T]:
+        s = t # E: Incompatible types in assignment (expression has type "T", variable has type "S")
+        p_s_s = None  # type: p[S, S]
+        if s:
+            return p_s_s # E: Incompatible return value type (got p[S, S], expected p[S, T])
+        p_t_t = None  # type: p[T, T]
+        if t:
+            return p_t_t # E: Incompatible return value type (got p[T, T], expected p[S, T])
+        t = t
+        s = s
+        p_s_t = None  # type: p[S, T]
+        return p_s_t
+[out]
+
+[case testProhibitTypeApplicationToGenericFunctions]
+from typing import TypeVar
+T = TypeVar('T')
+def f(x: T) -> T: pass
+
+y = f[int]  # E: Type application is only supported for generic classes
+[out]
+
+
+-- Generic types in expressions
+-- ----------------------------
+
+
+[case testTypeApplicationArgs]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+Node[int]()  # E: Too few arguments for "Node"
+Node[int](1, 1, 1)  # E: Too many arguments for "Node"
+[out]
+
+[case testTypeApplicationTvars]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T, S]): pass
+A[int]()  # E: Type application has too few types (2 expected)
+A[int, str, int]() # E: Type application has too many types (2 expected)
+[out]
+
+[case testInvalidTypeApplicationType]
+a = None # type: A
+class A: pass
+a[A]()  # E: Value of type "A" is not indexable
+A[A]()  # E: Type application targets a non-generic function or class
+[out]
+
+[case testTypeApplicationArgTypes]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+Node[int](1)
+Node[int]('a')  # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+
+class Dummy(Generic[T]):
+    def meth(self, x: T) -> None:
+        ...
+    def methout(self) -> T:
+        ...
+
+Dummy[int]().meth(1)
+Dummy[int]().meth('a')  # E: Argument 1 to "meth" of "Dummy" has incompatible type "str"; expected "int"
+reveal_type(Dummy[int]())  # E: Revealed type is '__main__.Dummy[builtins.int*]'
+reveal_type(Dummy[int]().methout())  # E: Revealed type is 'builtins.int*'
+[out]
+
+[case testTypeApplicationArgTypesSubclasses]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class C(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+class D(C[int, T], Generic[T]): ...
+
+D[str](1, 'a')
+D[str](1, 1)  # E: Argument 2 to "D" has incompatible type "int"; expected "str"
+
+class E(D[str]): ...
+E(1, 'a')
+E(1, 1)  # E: Argument 2 to "E" has incompatible type "int"; expected "str"
+[out]
+
+[case testTypeApplicationAlias]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+Alias = Node
+Alias[int](1)
+Alias[int]("a")  # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+[out]
+
+[case testTypeApplicationCrash]
+type[int] # this was crashing, see #2302 (comment)  # E: Type application targets a non-generic function or class
+[out]
+
+
+-- Generic type aliases
+-- --------------------
+
+[case testGenericTypeAliasesBasic]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, S]
+IntIntNode = Node[int, int]
+SameNode = Node[T, T]
+
+n = Node(1, 1) # type: IntIntNode
+n1 = Node(1, 'a') # type: IntIntNode # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+m = Node(1, 1) # type: IntNode
+m1 = Node('x', 1) # type: IntNode # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+m2 = Node(1, 1) # type: IntNode[str] # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+s = Node(1, 1) # type: SameNode[int]
+reveal_type(s) # E: Revealed type is '__main__.Node[builtins.int, builtins.int]'
+s1 = Node(1, 'x') # type: SameNode[int] # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+[out]
+
+[case testGenericTypeAliasesBasic2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, S]
+IntIntNode = Node[int, int]
+SameNode = Node[T, T]
+
+def output_bad() -> IntNode[str]:
+    return Node(1, 1) # Error - bad return type, see out
+
+def input(x: IntNode[str]) -> None:
+    pass
+input(Node(1, 's'))
+input(Node(1, 1)) # E: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+reveal_type(output()) # E: Revealed type is '__main__.Node[builtins.int, builtins.str]'
+
+def func(x: IntNode[T]) -> IntNode[T]:
+    return x
+reveal_type(func) # E: Revealed type is 'def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]'
+
+func(1) # E: Argument 1 to "func" has incompatible type "int"; expected Node[int, None]
+func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
+reveal_type(func(Node(1, 'x'))) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
+
+def func2(x: SameNode[T]) -> SameNode[T]:
+    return x
+reveal_type(func2) # E: Revealed type is 'def [T] (x: __main__.Node[T`-1, T`-1]) -> __main__.Node[T`-1, T`-1]'
+
+func2(Node(1, 'x')) # E: Cannot infer type argument 1 of "func2"
+y = func2(Node('x', 'x'))
+reveal_type(y) # E: Revealed type is '__main__.Node[builtins.str*, builtins.str*]'
+
+def wrap(x: T) -> IntNode[T]:
+    return Node(1, x)
+
+z = None # type: str
+reveal_type(wrap(z)) # E: Revealed type is '__main__.Node[builtins.int, builtins.str*]'
+
+[out]
+main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str"
+
+[case testGenericTypeAliasesWrongAliases]
+# flags: --show-column-numbers --fast-parser --python-version 3.6
+from typing import TypeVar, Generic, List, Callable, Tuple, Union
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+A = Node[T] # Error
+B = Node[T, T]
+C = Node[T, T, T] # Error
+D = Node[T, S]
+E = Node[Node[T, T], List[T]]
+
+F = Node[List[T, T], S] # Error
+G = Callable[..., List[T, T]] # Error
+H = Union[int, Tuple[T, Node[T]]] # Error
+h: H # Error
+h1: H[int, str] # Error
+
+x = None # type: D[int, str]
+reveal_type(x)
+y = None # type: E[int]
+reveal_type(y)
+
+X = T # Error
+
+[builtins fixtures/list.pyi]
+[out]
+main:9:4: error: "Node" expects 2 type arguments, but 1 given
+main:11:4: error: "Node" expects 2 type arguments, but 3 given
+main:15:9: error: "list" expects 1 type argument, but 2 given
+main:16:18: error: "list" expects 1 type argument, but 2 given
+main:17:24: error: "Node" expects 2 type arguments, but 1 given
+main:18:3: error: "Node" expects 2 type arguments, but 1 given
+main:19:4: error: Bad number of arguments for type alias, expected: 1, given: 2
+main:22:0: error: Revealed type is '__main__.Node[builtins.int, builtins.str]'
+main:24:0: error: Revealed type is '__main__.Node[__main__.Node[builtins.int, builtins.int], builtins.list[builtins.int]]'
+main:26:4: error: Type variable "__main__.T" is invalid as target for type alias
+
+[case testGenericTypeAliasesForAliases]
+from typing import TypeVar, Generic, List, Union
+T = TypeVar('T')
+S = TypeVar('S')
+
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        pass
+
+ListedNode = Node[List[T], List[S]]
+Second = ListedNode[int, T]
+Third = Union[int, Second[str]]
+
+def f2(x: T) -> Second[T]:
+    return Node([1], [x])
+reveal_type(f2('a')) # E: Revealed type is '__main__.Node[builtins.list[builtins.int], builtins.list[builtins.str*]]'
+
+def f3() -> Third:
+    return Node([1], ['x'])
+reveal_type(f3()) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.list[builtins.int], builtins.list[builtins.str]]]'
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesAny]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+IntNode = Node[int, S]
+AnyNode = Node[S, T]
+
+def output() -> IntNode[str]:
+    return Node(1, 'x')
+x = output() # type: IntNode # This is OK (implicit Any)
+
+y = None # type: IntNode
+y.x = 1
+y.x = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y.y = 1 # Both are OK (implicit Any)
+y.y = 'x'
+
+z = Node(1, 'x') # type: AnyNode
+reveal_type(z) # E: Revealed type is '__main__.Node[Any, Any]'
+
+[out]
+
+[case testGenericTypeAliasesAcessingMethods]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+    def meth(self) -> T:
+        return self.x
+
+ListedNode = Node[List[T]]
+l = None # type: ListedNode[int]
+l.x.append(1)
+l.meth().append(1)
+reveal_type(l.meth()) # E: Revealed type is 'builtins.list*[builtins.int]'
+l.meth().append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+
+ListedNode[str]([]).x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type List[str])
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesSubclassing]
+from typing import TypeVar, Generic, Tuple, List
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+TupledNode = Node[Tuple[T, T]]
+
+class D(Generic[T], TupledNode[T]):
+    ...
+class L(Generic[T], List[TupledNode[T]]):
+    ...
+
+def f_bad(x: T) -> D[T]:
+    return D(1)  # Error, see out
+
+L[int]().append(Node((1, 1)))
+L[int]().append(5) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected Node[Tuple[int, int]]
+
+x = D((1, 1)) # type: D[int]
+y = D(5) # type: D[int] # E: Argument 1 to "D" has incompatible type "int"; expected "Tuple[int, int]"
+
+def f(x: T) -> D[T]:
+    return D((x, x))
+reveal_type(f('a'))  # E: Revealed type is '__main__.D[builtins.str*]'
+
+[builtins fixtures/list.pyi]
+[out]
+main:15: error: Argument 1 to "D" has incompatible type "int"; expected "Tuple[T, T]"
+
+[case testGenericTypeAliasesSubclassingBad]
+from typing import TypeVar, Generic, Tuple, Union
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+TupledNode = Node[Tuple[T, T]]
+UNode = Union[int, Node[T]]
+
+class C(TupledNode): ... # Same as TupledNode[Any]
+class D(TupledNode[T]): ... # E: Invalid type "__main__.T"
+class E(Generic[T], UNode[T]): ... # E: Invalid base class
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesUnion]
+from typing import TypeVar, Generic, Union, Any
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+
+UNode = Union[int, Node[T]]
+x = 1 # type: UNode[int]
+
+x + 1 # E: Unsupported left operand type for + (some union)
+if not isinstance(x, Node):
+    x + 1
+
+if not isinstance(x, int):
+   x.x = 1
+   x.x = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+def f(x: T) -> UNode[T]:
+    if 1:
+        return Node(x)
+    else:
+        return 1
+
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int, __main__.Node[builtins.int*]]'
+
+TNode = Union[T, Node[int]]
+s = 1 # type: TNode[str] # E: Incompatible types in assignment (expression has type "int", variable has type "Union[str, Node[int]]")
+
+if not isinstance(s, str):
+    s.x = 1
+
+z = None # type: TNode # Same as TNode[Any]
+z.x
+z.foo() # Any simplifies Union to Any now. This test should be updated after #2197
+
+[builtins fixtures/isinstance.pyi]
+
+[case testGenericTypeAliasesTuple]
+from typing import TypeVar, Tuple
+T = TypeVar('T')
+
+SameTP = Tuple[T, T]
+IntTP = Tuple[int, T]
+
+def f1(x: T) -> SameTP[T]:
+    return x, x
+
+a, b, c = f1(1) # E: Need more than 2 values to unpack (3 expected)
+x, y = f1(1)
+reveal_type(x) # E: Revealed type is 'builtins.int'
+
+def f2(x: IntTP[T]) -> IntTP[T]:
+    return x
+
+f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, None]"
+reveal_type(f2((1, 'x'))) # E: Revealed type is 'Tuple[builtins.int, builtins.str*]'
+
+[builtins fixtures/for.pyi]
+
+[case testGenericTypeAliasesCallable]
+from typing import TypeVar, Generic, Callable
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        ...
+
+BadC = Callable[T] # E: Invalid function type
+
+C = Callable[..., T]
+C2 = Callable[[T, T], Node[T]]
+
+def make_cb(x: T) -> C[T]:
+    return lambda *args: x
+
+reveal_type(make_cb(1)) # E: Revealed type is 'def (*Any, **Any) -> builtins.int*'
+
+def use_cb(arg: T, cb: C2[T]) -> Node[T]:
+    return cb(arg, arg)
+
+use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected Callable[[int, int], Node[int]]
+my_cb = None # type: C2[int]
+use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type Callable[[int, int], Node[int]]; expected Callable[[str, str], Node[str]]
+reveal_type(use_cb(1, my_cb)) # E: Revealed type is '__main__.Node[builtins.int]'
+
+[out]
+
+[case testGenericTypeAliasesPEPBasedExample]
+from typing import TypeVar, List, Tuple
+T = TypeVar('T', int, bool)
+
+Vec = List[Tuple[T, T]]
+
+vec = []  # type: Vec[bool]
+vec.append('x') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "Tuple[bool, bool]"
+reveal_type(vec[0]) # E: Revealed type is 'Tuple[builtins.bool, builtins.bool]'
+
+def fun1(v: Vec[T]) -> T:
+    return v[0][0]
+def fun2(v: Vec[T], scale: T) -> Vec[T]:
+    return v
+
+reveal_type(fun1([(1, 1)])) # E: Revealed type is 'builtins.int*'
+fun1(1) # E: Argument 1 to "fun1" has incompatible type "int"; expected List[Tuple[int, int]]
+fun1([(1, 'x')]) # E: Cannot infer type argument 1 of "fun1"
+
+reveal_type(fun2([(1, 1)], 1)) # E: Revealed type is 'builtins.list[Tuple[builtins.int*, builtins.int*]]'
+fun2([('x', 'x')], 'x') # E: Type argument 1 of "fun2" has incompatible value "str"
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesImporting]
+from typing import TypeVar
+from a import Node, TupledNode
+T = TypeVar('T')
+
+n = None # type: TupledNode[int]
+n.x = 1
+n.y = (1, 1)
+n.y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "Tuple[int, int]")
+
+def f(x: Node[T, T]) -> TupledNode[T]:
+    return Node(x.x, (x.x, x.x))
+
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected Node[None, None]
+f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f"
+reveal_type(Node('x', 'x')) # E: Revealed type is 'a.Node[builtins.str*, builtins.str*]'
+
+[file a.py]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        self.x = x
+        self.y = y
+
+TupledNode = Node[T, Tuple[T, T]]
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesRuntimeExpressionsInstance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class Node(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None:
+        ...
+
+IntNode = Node[int, T]
+IntNode[int](1, 1)
+IntNode[int](1, 'a')  # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+SameNode = Node[T, T]
+ff = SameNode[T](1, 1)  # E: Need type annotation for variable
+a = SameNode(1, 'x')
+reveal_type(a) # E: Revealed type is '__main__.Node[Any, Any]'
+b = SameNode[int](1, 1)
+reveal_type(b) # E: Revealed type is '__main__.Node[builtins.int*, builtins.int*]'
+SameNode[int](1, 'x') # E: Argument 2 to "Node" has incompatible type "str"; expected "int"
+
+[out]
+
+[case testGenericTypeAliasesRuntimeExpressionsOther]
+from typing import TypeVar, Union, Tuple, Callable, Any
+T = TypeVar('T')
+
+CA = Callable[[T], int]
+TA = Tuple[T, int]
+UA = Union[T, int]
+
+cs = CA[str] + 1 # E: Unsupported left operand type for + ("Type alias to Callable")
+reveal_type(cs) # E: Revealed type is 'Any'
+
+ts = TA[str]() # E: "Type alias to Tuple" not callable
+reveal_type(ts) # E: Revealed type is 'Any'
+
+us = UA[str].x # E: "Type alias to Union" has no attribute "x"
+reveal_type(us) # E: Revealed type is 'Any'
+
+[out]
+
+[case testGenericTypeAliasesTypeVarBinding]
+from typing import TypeVar, Generic, List
+T = TypeVar('T')
+S = TypeVar('S')
+
+class A(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None: ...
+
+class B(Generic[T, S]):
+    def __init__(self, x: List[T], y: List[S]) -> None: ...
+
+SameA = A[T, T]
+SameB = B[T, T]
+
+class C(Generic[T]):
+    a = None # type: SameA[T]
+    b = SameB[T]([], [])
+
+reveal_type(C[int]().a) # E: Revealed type is '__main__.A[builtins.int*, builtins.int*]'
+reveal_type(C[str]().b) # E: Revealed type is '__main__.B[builtins.str*, builtins.str*]'
+
+[builtins fixtures/list.pyi]
+
+[case testGenericTypeAliasesTypeVarConstraints]
+# flags: --show-column-numbers
+from typing import TypeVar, Generic
+T = TypeVar('T', int, list)
+S = TypeVar('S', int, list)
+
+class A(Generic[T, S]):
+    def __init__(self, x: T, y: S) -> None: ...
+
+BadA = A[str, T]  # One error here
+SameA = A[T, T]
+
+x = None # type: SameA[int]
+y = None # type: SameA[str] # Two errors here, for both args of A
+
+[builtins fixtures/list.pyi]
+[out]
+main:9:7: error: Type argument 1 of "A" has incompatible value "str"
+main:13: error: Type argument 1 of "A" has incompatible value "str"
+main:13: error: Type argument 2 of "A" has incompatible value "str"
+
+[case testGenericTypeAliasesIgnoredPotentialAlias]
+class A: ...
+Bad = A[int] # type: ignore
+
+reveal_type(Bad) # E: Revealed type is 'Any'
+[out]
+
+
+-- Multiple assignment with lists
+-- ------------------------------
+
+
+[case testMultipleAssignmentWithLists]
+from typing import List
+class A: pass
+class B: pass
+class B2(B): pass
+a = None # type: A
+b = None # type: B
+b2 = None # type: B2
+
+list_a = [a]
+list_b = [b]
+list_b2 = [b2]
+
+a, b = list_a   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b, a = list_a   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b2, b2 = list_b # E: Incompatible types in assignment (expression has type "B", variable has type "B2")
+
+a, a = list_a
+b, b2, b = list_b2
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithListsInInitialization]
+from typing import List
+class A: pass
+list_object = [object()]
+list_a = [A()]
+a, b = list_object # type: (A, object) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+c, d = list_object # type: (object, A) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+e, f = list_a # type: (A, object)
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithListAndIndexing]
+from typing import List
+a = None # type: List[A]
+b = None # type: List[int]
+
+a[1], b[1] = a # E: Incompatible types in assignment (expression has type "A", target has type "int")
+a[1], a[2] = a
+
+class A: pass
+[file builtins.py]
+from typing import TypeVar, Generic, Iterable
+T = TypeVar('T')
+class object: pass
+class list(Iterable[T], Generic[T]):
+  def __setitem__(self, x: int, v: T) -> None: pass
+class int: pass
+class type: pass
+class tuple: pass
+class function: pass
+class str: pass
+
+[case testMultipleAssignmentWithIterable]
+from typing import Iterable, TypeVar
+a = None  # type: int
+b = None  # type: str
+T = TypeVar('T')
+
+def f(x: T) -> Iterable[T]: pass
+
+a, b = f(a)   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b, b = f(a)   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+a, a = f(a)
+b, b = f(b)
+[builtins fixtures/for.pyi]
+
+
+-- Error messages
+-- --------------
+
+
+[case testErrorWithLongGenericTypeName]
+from typing import TypeVar, Generic
+B = TypeVar('B')
+C = TypeVar('C')
+D = TypeVar('D')
+E = TypeVar('E')
+F = TypeVar('F')
+G = TypeVar('G')
+H = TypeVar('H')
+I = TypeVar('I')
+J = TypeVar('J')
+K = TypeVar('K')
+L = TypeVar('L')
+M = TypeVar('M')
+N = TypeVar('N')
+O = TypeVar('O')
+P = TypeVar('P')
+Q = TypeVar('Q')
+R = TypeVar('R')
+S = TypeVar('S')
+T = TypeVar('T')
+U = TypeVar('U')
+V = TypeVar('V')
+W = TypeVar('W')
+X = TypeVar('X')
+Y = TypeVar('Y')
+Z = TypeVar('Z')
+class OO: pass
+a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]
+
+f(a) # E: Argument 1 to "f" has incompatible type A[...]; expected "OO"
+
+def f(a: OO) -> None:
+    pass
+class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass
+
+[case testErrorWithShorterGenericTypeName]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[object, B]
+f(a) # E: Argument 1 to "f" has incompatible type A[object, B]; expected "B"
+
+def f(a: 'B') -> None: pass
+class A(Generic[S, T]): pass
+class B: pass
+
+[case testErrorWithShorterGenericTypeName2]
+from typing import Callable, TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+a = None # type: A[object, Callable[[], None]]
+f(a) # E: Argument 1 to "f" has incompatible type A[object, Callable[[], None]]; expected "B"
+
+def f(a: 'B') -> None: pass
+class A(Generic[S, T]): pass
+class B: pass
+
+
+-- Overloads + generics
+-- --------------------
+
+
+[case testGenericArgumentInOverload]
+from typing import overload, List
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+
+ at overload
+def f(a: List[A]) -> A: pass
+ at overload
+def f(a: B) -> B: pass
+
+b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f([b]) # E: List item 0 has incompatible type "B"
+a = f(b)   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f([a])
+b = f(b)
+[builtins fixtures/list.pyi]
+
+[case testGenericFunctionAsOverloadItem]
+from typing import overload, TypeVar, List
+T = TypeVar('T')
+class A: pass
+class B: pass
+
+ at overload
+def f(a: B) -> B: pass
+ at overload
+def f(a: List[T]) -> T: pass
+
+a, b = None, None # type: (A, B)
+
+b = f([a]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f([b]) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(b)   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f([a])
+b = f([b])
+b = f(b)
+[builtins fixtures/list.pyi]
+
+
+-- Type variable scoping
+-- ---------------------
+
+
+[case testLocalTypeVariable]
+from typing import TypeVar
+def f() -> None:
+    T = TypeVar('T')
+    def g(x: T) -> T: pass
+    a = g(1)
+    a = 1
+    a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[out]
+
+[case testClassLevelTypeVariable]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+    def g(self, x: T) -> T: pass
+a = A().g(1)
+a = 1
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testGenericClassInnerFunctionTypeVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None:
+        self.a = a
+    def f(self, n: int) -> None:
+        def g(a: T):
+            self.a = a
+        g(self.a)
+        g(n) # E: Argument 1 to "g" has incompatible type "int"; expected "T"
+[out]
+
+
+-- Callable subtyping with generic functions
+-- -----------------------------------------
+
+
+[case testSubtypingWithGenericFunctions]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+
+def f1(x: A) -> A: ...
+def f2(x: A) -> B: ...
+def f3(x: B) -> B: ...
+def f4(x: int) -> A: ...
+
+y1 = f1
+y1 = f1
+y1 = f2
+y1 = f3
+y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], A])
+
+y2 = f2
+y2 = f2
+y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
+y2 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[A], B])
+y2 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[A], B])
+
+y3 = f3
+y3 = f3
+y3 = f1
+y3 = f2
+y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[int], A], variable has type Callable[[B], B])
+
+y4 = f4
+y4 = f4
+y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[int], A])
+y4 = f2
+y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[B], B], variable has type Callable[[int], A])
+
+[case testSubtypingWithGenericInnerFunctions]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+T = TypeVar('T')
+def outer(t: T) -> None:
+    def f1(x: A) -> A: ...
+    def f2(x: A) -> B: ...
+    def f3(x: T) -> A: ...
+    def f4(x: A) -> T: ...
+    def f5(x: T) -> T: ...
+
+    y1 = f1
+    y1 = f2
+    y1 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], A])
+    y1 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[A], A])
+    y1 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], A])
+
+    y2 = f2
+    y2 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], B])
+
+    y3 = f3
+    y3 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[T], A])
+    y3 = f2
+    y3 = f4 # E: Incompatible types in assignment (expression has type Callable[[A], T], variable has type Callable[[T], A])
+    y3 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], A])
+
+    y4 = f4
+    y4 = f1 # E: Incompatible types in assignment (expression has type Callable[[A], A], variable has type Callable[[A], T])
+    y4 = f2
+    y4 = f3 # E: Incompatible types in assignment (expression has type Callable[[T], A], variable has type Callable[[A], T])
+    y4 = f5 # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[A], T])
+
+    y5 = f5
+    y5 = f1
+    y5 = f2
+    y5 = f3
+    y5 = f4
+[out]
+
+[case testSubtypingWithGenericFunctionUsingTypevarWithValues]
+from typing import TypeVar, Callable
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+def g1(f: Callable[[str], str]) -> None: pass
+g1(f)
+def g2(f: Callable[[int], int]) -> None: pass
+g2(f)
+def g3(f: Callable[[object], object]) -> None: pass
+g3(f) # E: Argument 1 to "g3" has incompatible type Callable[[T], T]; \
+           expected Callable[[object], object]
+
+[case testSubtypingWithGenericFunctionUsingTypevarWithValues2-skip]
+from typing import TypeVar, Callable
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+g = f
+g = f
+
+
+-- Operations on type variable types
+-- ---------------------------------
+
+
+[case testTypeVariableTypeEquality]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    a.__ne__(b)
+    if a == b:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+
+[case testTypeVariableTypeIs]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    if a is b or a is 1:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+
+[case testTypeVariableTypeLessThan]
+from typing import TypeVar
+T = TypeVar('T')
+def f(a: T, b: T) -> T:
+    if a < b:
+        return a
+    else:
+        return b
+[builtins fixtures/ops.pyi]
+[out]
+main:4: error: Unsupported left operand type for < ("T")
+
+
+-- Subtyping generic callables
+-- ---------------------------
+
+[case testSubtypingGenericTypeObject]
+from typing import Callable, Generic, TypeVar
+T = TypeVar('T')
+class C(Generic[T]):
+    def __init__(self) -> None: pass
+x = C # type: Callable[[], C[int]]
+y = C # type: Callable[[], int] # E: Incompatible types in assignment (expression has type C[T], variable has type Callable[[], int])
+
+
+-- Special cases
+-- -------------
+
+
+[case testIdentityHigherOrderFunction]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def square(n: int) -> int:
+    return n
+def id(f: Callable[[A], B]) -> Callable[[A], B]:
+    return f
+g = id(square)
+g(1)
+g('x')  # E: Argument 1 has incompatible type "str"; expected "int"
+
+
+[case testIdentityHigherOrderFunction2]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+def voidify(n: int) -> None: pass
+def identity(f: Callable[[A], None]) -> Callable[[A], None]:
+    return f
+identity(voidify)(3)
+
+[case testIdentityHigherOrderFunction3]
+from typing import Callable, TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def fn(n: B) -> None: pass
+def identity(f: A) -> A:
+    return f
+identity(fn)
+identity(fn)('x')
+
+[case testTypeVariableUnionAndCallableInTypeInference]
+from typing import Union, Callable, TypeVar
+T = TypeVar('T')
+def f(x: T, y: Union[T, Callable[[T], None]]) -> None: pass
+f('', '')
+
+[case testGenericFunctionsWithUnalignedIds]
+from typing import TypeVar
+A = TypeVar('A')
+B = TypeVar('B')
+def f1(x: int, y: A) -> A: ...
+def f2(x: int, y: A) -> B: ...
+def f3(x: A, y: B) -> B: ...
+g = f1
+g = f2
+g = f3
+
+[case testTypeVariableWithContainerAndTuple]
+from typing import TypeVar, Container
+T = TypeVar('T')
+def f(x: Container[T]) -> T: ...
+reveal_type(f((1, 2))) # E: Revealed type is 'builtins.int*'
+
+[case testClassMethodInGenericClassWithGenericConstructorArg]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    def __init__(self, a: T) -> None: pass
+    @classmethod
+    def f(cls) -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testClassMethodInClassWithGenericConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A:
+    def __init__(self, a: T) -> None: pass
+    @classmethod
+    def f(cls) -> None: pass
+[builtins fixtures/classmethod.pyi]
+
+[case testGenericOperatorMethodOverlapping]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+T2 = TypeVar('T2')
+S = TypeVar('S', bound=str)
+S2 = TypeVar('S2', bound=str)
+class G(Generic[T]):
+    pass
+class A:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[T2]) -> G[T2]: pass
+class B:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[S]) -> G[S]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
+class C:
+    def __or__(self, x: G[S]) -> G[S]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass
+
+[case testGenericOperatorMethodOverlapping2]
+from typing import TypeVar, Generic, Tuple
+X = TypeVar('X')
+T = TypeVar('T', int, str)
+T2 = TypeVar('T2', int, str)
+S = TypeVar('S', float, str)
+S2 = TypeVar('S2', float, str)
+class G(Generic[X]):
+    pass
+class A:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[T2]) -> G[T2]: pass
+class B:
+    def __or__(self, x: G[T]) -> G[T]: pass
+    def __ior__(self, x: G[S]) -> G[S]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
+class C:
+    def __or__(self, x: G[S]) -> G[S]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass
+class D:
+    def __or__(self, x: G[X]) -> G[X]: pass
+    def __ior__(self, x: G[S2]) -> G[S2]: pass \
+        # E: Signatures of "__ior__" and "__or__" are incompatible
diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test
new file mode 100644
index 0000000..b6d127c
--- /dev/null
+++ b/test-data/unit/check-ignore.test
@@ -0,0 +1,218 @@
+[case testIgnoreTypeError]
+x = 1
+x() # type: ignore
+x() # E: "int" not callable
+
+[case testIgnoreUndefinedName]
+x = 1
+y # type: ignore
+z # E: Name 'z' is not defined
+
+[case testIgnoreImportError]
+import xyz_m # type: ignore
+xyz_m.foo
+1() # E: "int" not callable
+
+[case testIgnoreImportFromError]
+from xyz_m import a, b # type: ignore
+a.foo
+b()
+1() # E: "int" not callable
+
+[case testIgnoreImportFromErrorMultiline]
+from xyz_m import ( # type: ignore
+    a, b
+)
+a.foo
+b()
+1() # E: "int" not callable
+
+[case testIgnoreImportAllError]
+from xyz_m import * # type: ignore
+x   # E: Name 'x' is not defined
+1() # E: "int" not callable
+
+[case testIgnoreImportBadModule]
+import m # type: ignore
+from m import a # type: ignore
+[file m.py]
++
+[out]
+tmp/m.py:1: error: invalid syntax
+
+[case testIgnoreAppliesOnlyToMissing]
+import a # type: ignore
+import b # type: ignore
+reveal_type(a.foo) # E: Revealed type is 'Any'
+reveal_type(b.foo) # E: Revealed type is 'builtins.int'
+a.bar()
+b.bar() # E: "module" has no attribute "bar"
+
+[file b.py]
+foo = 3
+
+[builtins fixtures/module_all.pyi]
+[out]
+
+[case testIgnoreImportStarFromBadModule]
+from m import * # type: ignore
+[file m.py]
++
+[out]
+tmp/m.py:1: error: invalid syntax
+
+[case testIgnoreAssignmentTypeError]
+x = 1
+x = '' # type: ignore
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testIgnoreInvalidOverride]
+class A:
+    def f(self) -> int: pass
+class B(A):
+    def f(self) -> str: pass # type: ignore
+
+[case testIgnoreMissingModuleAttribute]
+import m
+m.x = object # type: ignore
+m.f() # type: ignore
+m.y # E: "module" has no attribute "y"
+[file m.py]
+[builtins fixtures/module.pyi]
+
+[case testIgnoreTypeInferenceError]
+x = [] # type: ignore
+y = x
+x.append(1)
+[builtins fixtures/list.pyi]
+
+[case testIgnoreTypeInferenceError2]
+def f() -> None: pass
+x = f() # type: ignore
+y = x
+x = 1
+[builtins fixtures/list.pyi]
+
+[case testIgnoreTypeInferenceErrorAndMultipleAssignment]
+x, y = [], [] # type: ignore
+z = x
+z = y
+[builtins fixtures/list.pyi]
+
+[case testIgnoreSomeStarImportErrors]
+from m1 import *
+from m2 import * # type: ignore
+# We should still import things that don't conflict.
+y() # E: "str" not callable
+z() # E: "int" not callable
+x() # E: "int" not callable
+[file m1.py]
+x = 1
+y = ''
+[file m2.py]
+x = ''
+z = 1
+
+[case testIgnoredModuleDefinesBaseClass1]
+from m import B # type: ignore
+
+class C(B):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "C"
+        self.g(1)
+[out]
+
+[case testIgnoredModuleDefinesBaseClass2]
+import m # type: ignore
+
+class C(m.B):
+    def f(self) -> None: ...
+
+c = C()
+c.f(1) # E: Too many arguments for "f" of "C"
+c.g(1)
+c.x = 1
+[out]
+
+[case testIgnoredModuleDefinesBaseClassAndClassAttribute]
+import m # type: ignore
+
+class C(m.B):
+    @staticmethod
+    def f() -> None: pass
+
+C.f(1) # E: Too many arguments for "f" of "C"
+C.g(1)
+C.x = 1
+[builtins fixtures/staticmethod.pyi]
+[out]
+
+[case testIgnoredModuleDefinesBaseClassWithInheritance1]
+from m import B # type: ignore
+
+class C: pass
+class D(C, B):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "D"
+        self.g(1)
+[out]
+
+[case testIgnoredModuleDefinesBaseClassWithInheritance2]
+from m import B # type: ignore
+
+class C(B): pass
+class D(C):
+    def f(self) -> None:
+        self.f(1) # E: Too many arguments for "f" of "D"
+        self.g(1)
+[out]
+
+[case testIgnoreWithFollowingIndentedComment]
+if 1:  # type: ignore
+    # blah
+    pass
+[out]
+
+[case testIgnoreTooManyTypeArguments]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+
+class Base(Generic[T, U]):
+  pass
+
+class PartialBase(Base[T, int], Generic[T]):
+  pass
+
+class Child(PartialBase[str, int]):  # type: ignore
+  pass
+
+
+def foo(x: Base[str, int]) -> None: pass
+foo(Child())
+
+def bar(x: Base[str, str]) -> None: pass
+bar(Child())
+[out]
+main:19: error: Argument 1 to "bar" has incompatible type "Child"; expected Base[str, str]
+
+[case testTypeIgnoreLineNumberWithinFile]
+import m
+pass # type: ignore
+m.f(kw=1)
+[file m.py]
+pass
+def f() -> None: pass
+[out]
+main:3: error: Unexpected keyword argument "kw" for "f"
+tmp/m.py:2: note: "f" defined here
+
+[case testIgnoreUnexpectedKeywordArgument]
+import m
+m.f(kw=1)  # type: ignore
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testCannotIgnoreBlockingError]
+yield  # type: ignore  # E: 'yield' outside function
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
new file mode 100644
index 0000000..439d536
--- /dev/null
+++ b/test-data/unit/check-incremental.test
@@ -0,0 +1,1780 @@
+-- Checks for incremental mode (see testcheck.py).
+-- Each test is run twice, once with a cold cache, once with a warm cache.
+-- Before the tests are run the second time, any *.py.next files are copied to *.py.
+--
+-- Errors expected in the first run should be in the `[out1]` section, and
+-- errors expected in the second run should be in the `[out2]` section. If a
+-- section is omitted, it is expected there are no errors on that run.
+--
+-- Any files that we expect to be rechecked should be annotated in the [rechecked]
+-- annotation, and any files expect to be stale (aka have a modified interface)
+-- should be annotated in the [stale] annotation. Note that a file that ends up
+-- producing an error does not create a new cache file and so is not considered stale.
+--
+-- The test suite will automatically assume that __main__ is stale and rechecked in
+-- all cases so we can avoid constantly having to annotate it. The list of
+-- rechecked/stale files can be in any arbitrary order, or can be left empty
+-- if no files should be rechecked/stale.
+
+[case testIncrementalEmpty]
+[rechecked]
+[stale]
+
+[case testIncrementalBasics]
+import m
+[file m.py]
+def foo():
+    pass
+[file m.py.next]
+def foo() -> None:
+    pass
+[rechecked m]
+[stale m]
+
+[case testIncrementalError]
+import m
+[file m.py]
+def foo() -> None:
+    pass
+[file m.py.next]
+def foo() -> None:
+    bar()
+[rechecked m]
+[stale]
+[out2]
+tmp/m.py:2: error: Name 'bar' is not defined
+
+[case testIncrementalSimpleImportSequence]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[rechecked]
+[stale]
+
+
+[case testIncrementalInternalChangeOnly]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[file mod3.py.next]
+def func3() -> None: 3 + 2
+
+[rechecked mod3]
+[stale]
+
+
+[case testIncrementalImportGone]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def func1() -> A: pass
+
+[file mod2.py]
+class A: pass
+
+[file mod1.py.next]
+def func1() -> A: pass
+
+[rechecked mod1]
+[stale]
+[out2]
+tmp/mod1.py:1: error: Name 'A' is not defined
+
+[case testIncrementalSameNameChange]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def func1() -> A: pass
+
+[file mod2.py]
+class A: pass
+
+[file mod2.py.next]
+class Parent: pass
+class A(Parent): pass
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalPartialInterfaceChange]
+import mod1
+mod1.func1()
+
+[file mod1.py]
+import mod2
+def func1() -> None: mod2.func2()
+
+[file mod2.py]
+import mod3
+def func2() -> None: mod3.func3()
+
+[file mod3.py]
+def func3() -> None: pass
+
+[file mod3.py.next]
+def func3() -> int: return 2
+
+[rechecked mod2, mod3]
+[stale mod3]
+
+[case testIncrementalInternalFunctionDefinitionChange]
+import mod1
+
+[file mod1.py]
+import mod2
+def accepts_int(a: int) -> int: return a
+accepts_int(mod2.foo())
+
+[file mod2.py]
+def foo() -> int:
+    def inner() -> int:
+        return 42
+    return inner()
+
+[file mod2.py.next]
+def foo() -> int:
+    def inner2() -> str:
+        return "foo"
+    return inner2()
+
+[rechecked mod2]
+[stale]
+[out2]
+tmp/mod2.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalInternalScramble]
+import mod1
+
+[file mod1.py]
+import mod2
+mod2.foo()
+
+[file mod2.py]
+def baz() -> int:
+    return 3
+
+def bar() -> int:
+    return baz()
+
+def foo() -> int:
+    return bar()
+
+[file mod2.py.next]
+def foo() -> int:
+    return baz()
+
+def bar() -> int:
+    return bar()
+
+def baz() -> int:
+    return 42
+[rechecked mod2]
+[stale]
+
+[case testIncrementalMethodInterfaceChange]
+import mod1
+
+[file mod1.py]
+import mod2
+
+[file mod2.py]
+class Foo:
+    def bar(self, a: str) -> str:
+        return "a"
+
+[file mod2.py.next]
+class Foo:
+    def bar(self, a: float) -> str:
+        return "a"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalBaseClassChange]
+import mod1
+
+[file mod1.py]
+from mod2 import Child
+Child().good_method()
+
+[file mod2.py]
+class Good:
+    def good_method(self) -> int: return 1
+class Bad: pass
+class Child(Good): pass
+
+[file mod2.py.next]
+class Good:
+    def good_method(self) -> int: return 1
+class Bad: pass
+class Child(Bad): pass
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:2: error: "Child" has no attribute "good_method"
+
+[case testIncrementalCascadingChange]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+def accepts_int(a: int) -> None: pass
+accepts_int(A)
+
+[file mod2.py]
+from mod3 import B
+A = B
+
+[file mod3.py]
+from mod4 import C
+B = C
+
+[file mod4.py]
+C = 3
+
+[file mod4.py.next]
+C = "A"
+
+[rechecked mod1, mod2, mod3, mod4]
+[stale mod2, mod3, mod4]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+const = 3
+
+[file mod3.py.next]
+# Import to mod4 is gone!
+
+[rechecked mod1, mod2, mod3]
+[stale mod3]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: "module" has no attribute "mod4"
+
+[case testIncrementalLongBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.mod5.mod6.mod7.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+import mod5
+
+[file mod5.py]
+import mod6
+
+[file mod6.py]
+import mod7
+
+[file mod7.py]
+const = 3
+
+[file mod6.py.next]
+# Import to mod7 is gone!
+
+[rechecked mod1, mod5, mod6]
+[stale mod6]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: "module" has no attribute "mod7"
+
+[case testIncrementalNestedBrokenCascade]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+accept_int(mod2.mod3.mod4.const)
+
+[file mod2/__init__.py]
+import mod2.mod3 as mod3
+
+[file mod2/mod3/__init__.py]
+import mod2.mod3.mod4 as mod4
+
+[file mod2/mod3/__init__.py.next]
+# Import is gone!
+
+[file mod2/mod3/mod4.py]
+const = 3
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2.mod3]
+[builtins fixtures/module.pyi]
+[out2]
+tmp/mod1.py:3: error: "module" has no attribute "mod4"
+
+[case testIncrementalNestedBrokenCascadeWithType1]
+import mod1, mod2.mod3.mod5
+
+[file mod1.py]
+import mod2
+def accept_int(x: int) -> None: pass
+def produce() -> mod2.CustomType:
+    return mod2.CustomType()
+a = produce()
+accept_int(a.foo())
+
+[file mod2/__init__.py]
+from mod2.mod3 import CustomType
+
+[file mod2/mod3/__init__.py]
+from mod2.mod3.mod4 import CustomType
+
+[file mod2/mod3/__init__.py.next]
+# Import a different class that also happens to be called 'CustomType'
+from mod2.mod3.mod5 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/mod4.py]
+class CustomType:
+    def foo(self) -> int: return 1
+
+[file mod2/mod3/mod5.py]
+class CustomType:
+    def foo(self) -> str: return "a"
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2, mod2.mod3]
+[builtins fixtures/module.pyi]
+[out1]
+[out2]
+tmp/mod1.py:6: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalNestedBrokenCascadeWithType2]
+import mod1, mod2.mod3.mod5
+
+[file mod1.py]
+from mod2 import produce
+def accept_int(x: int) -> None: pass
+a = produce()
+accept_int(a.foo())
+
+[file mod2/__init__.py]
+from mod2.mod3 import produce
+
+[file mod2/mod3/__init__.py]
+from mod2.mod3.mod4 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/__init__.py.next]
+# Import a different class that also happens to be called 'CustomType'
+from mod2.mod3.mod5 import CustomType
+def produce() -> CustomType:
+    return CustomType()
+
+[file mod2/mod3/mod4.py]
+class CustomType:
+    def foo(self) -> int: return 1
+
+[file mod2/mod3/mod5.py]
+class CustomType:
+    def foo(self) -> str: return "a"
+
+[rechecked mod1, mod2, mod2.mod3]
+[stale mod2.mod3]
+[builtins fixtures/module.pyi]
+[out1]
+[out2]
+tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalRemoteChange]
+import mod1
+
+[file mod1.py]
+import mod2
+def accepts_int(a: int) -> None: pass
+accepts_int(mod2.mod3.mod4.const)
+
+[file mod2.py]
+import mod3
+
+[file mod3.py]
+import mod4
+
+[file mod4.py]
+const = 3
+
+[file mod4.py.next]
+const = "foo"
+
+[rechecked mod1, mod3, mod4]
+[stale mod4]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalBadChange]
+import mod1
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    return func2()
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.next]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalBadChangeWithSave]
+import mod0
+
+[file mod0.py]
+import mod1
+A = mod1.func2()
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    return func2()
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.next]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod0, mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Incompatible return value type (got "str", expected "int")
+
+[case testIncrementalOkChangeWithSave]
+import mod0
+
+[file mod0.py]
+import mod1
+A = mod1.func2()
+
+[file mod1.py]
+from mod2 import func2
+
+def func1() -> int:
+    func2()
+    return 1
+
+[file mod2.py]
+def func2() -> int:
+    return 1
+
+[file mod2.py.next]
+def func2() -> str:
+    return "foo"
+
+[rechecked mod0, mod1, mod2]
+[stale mod0, mod2]
+[out2]
+
+[case testIncrementalWithComplexDictExpression]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+my_dict = {
+    'a': [1, 2, 3],
+    'b': [4, 5, 6]
+}
+
+[file mod1_private.py.next]
+my_dict = {
+    'a': [1, 2, 3],
+    'b': [4, 5, 'a']
+}
+
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/dict.pyi]
+
+[case testIncrementalWithComplexConstantExpressionNoAnnotation]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + foobar()
+
+[file mod1_private.py.next]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + baz()
+
+[rechecked mod1_private]
+[stale]
+
+[case testIncrementalWithComplexConstantExpressionWithAnnotation]
+import mod1
+
+[file mod1.py]
+import mod1_private
+
+[file mod1_private.py]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + foobar()  # type: int
+
+[file mod1_private.py.next]
+def foobar() -> int: return 1
+def baz() -> int: return 2
+const = 1 + baz()  # type: int
+
+[rechecked mod1_private]
+[stale]
+
+[case testIncrementalSmall]
+import mod1
+
+[file mod1.py]
+import mod1_private
+def accepts_int(a: int) -> None: pass
+accepts_int(mod1_private.some_func(12))
+
+[file mod1_private.py]
+def some_func(a: int) -> int:
+    return 1
+
+[file mod1_private.py.next]
+def some_func(a: int) -> str:
+    return "a"
+
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/ops.pyi]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalWithDecorators]
+import mod1
+
+[file mod1.py]
+import mod1_private
+def accepts_int(a: int) -> None: pass
+accepts_int(mod1_private.some_func(12))
+
+[file mod1_private.py]
+from typing import Callable
+def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
+    return lambda a: f(a) * 10
+
+def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
+    return lambda a: str(f(a))
+
+ at multiply
+def some_func(a: int) -> int:
+    return a + 2
+
+[file mod1_private.py.next]
+from typing import Callable
+def multiply(f: Callable[[int], int]) -> Callable[[int], int]:
+    return lambda a: f(a) * 10
+
+def stringify(f: Callable[[int], int]) -> Callable[[int], str]:
+    return lambda a: str(f(a))
+
+ at stringify
+def some_func(a: int) -> int:
+    return a + 2
+[rechecked mod1, mod1_private]
+[stale mod1_private]
+[builtins fixtures/ops.pyi]
+[out2]
+tmp/mod1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalChangingClassAttributes]
+import mod1
+
+[file mod1.py]
+import mod2
+mod2.Foo.A
+
+[file mod2.py]
+class Foo:
+    A = 3
+
+[file mod2.py.next]
+class Foo:
+    A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalChangingFields]
+import mod1
+
+[file mod1.py]
+import mod2
+f = mod2.Foo()
+f.A
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.next]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+
+[case testIncrementalChangingFieldsWithAssignment]
+import mod1
+
+[file mod1.py]
+import mod2
+f = mod2.Foo()
+B = f.A
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.next]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod1, mod2]
+
+[case testIncrementalCheckingChangingFields]
+import mod1
+
+[file mod1.py]
+import mod2
+def accept_int(a: int) -> int: return a
+f = mod2.Foo()
+accept_int(f.A)
+
+[file mod2.py]
+class Foo:
+    def __init__(self) -> None:
+        self.A = 3
+
+[file mod2.py.next]
+class Foo:
+    def __init__(self) -> None:
+        self.A = "hello"
+
+[rechecked mod1, mod2]
+[stale mod2]
+[out2]
+tmp/mod1.py:4: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalNestedClassDefinition]
+import mod1
+
+[file mod1.py]
+import mod2
+b = mod2.Foo.Bar()
+b.attr
+
+[file mod2.py]
+class Foo:
+    class Bar:
+        attr = 3
+
+[file mod2.py.next]
+class Foo:
+    class Bar:
+        attr = "foo"
+
+[rechecked mod1, mod2]
+[stale mod2]
+
+[case testIncrementalSimpleBranchingModules]
+import mod1
+import mod2
+
+[file mod1.py]
+def func() -> None: pass
+
+[file mod2.py]
+def func() -> None: pass
+
+[file mod1.py.next]
+def func() -> int: return 1
+
+[rechecked mod1]
+[stale mod1]
+
+[case testIncrementalSubmoduleImport]
+from parent.childA import Foo
+
+def func1() -> Foo:
+    return Foo()
+
+[file parent/__init__.py]
+from parent.childA import Foo
+from parent.childB import Bar
+
+__all__ = ['Foo', 'Bar']
+
+[file parent/childA.py]
+import parent
+
+class Foo:
+    def test(self) -> int:
+        return parent.Bar().test()
+
+[file parent/childB.py]
+class Bar:
+    def test(self) -> int: return 3
+
+[builtins fixtures/module_all.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalSubmoduleWithAttr]
+import mod.child
+x = mod.child.Foo()
+x.bar()
+
+[file mod/__init__.py]
+
+[file mod/child.py]
+class Foo:
+    def bar(self) -> None: pass
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalNestedSubmoduleImportFromWithAttr]
+from mod1.mod2 import mod3
+def accept_int(a: int) -> None: pass
+
+accept_int(mod3.val3)
+
+[file mod1/__init__.py]
+val1 = 1
+
+[file mod1/mod2/__init__.py]
+val2 = 1
+
+[file mod1/mod2/mod3.py]
+val3 = 1
+
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalNestedSubmoduleWithAttr]
+import mod1.mod2.mod3
+def accept_int(a: int) -> None: pass
+
+accept_int(mod1.mod2.mod3.val3)
+accept_int(mod1.mod2.val2)
+accept_int(mod1.val1)
+
+[file mod1/__init__.py]
+val1 = 1
+
+[file mod1/mod2/__init__.py]
+val2 = 1
+
+[file mod1/mod2/mod3.py]
+val3 = 1
+
+[builtins fixtures/module.pyi]
+[rechecked]
+[stale]
+
+[case testIncrementalSubmoduleParentWithImportFrom]
+import parent
+
+[file parent/__init__.py]
+from parent import a
+
+[file parent/a.py]
+val = 3
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalSubmoduleParentBackreference]
+import parent
+
+[file parent/__init__.py]
+from parent import a
+
+[file parent/a.py]
+import parent.b
+
+[file parent/b.py]
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalSubmoduleParentBackreferenceComplex]
+import parent
+
+[file parent/__init__.py]
+import parent.a
+
+[file parent/a.py]
+import parent.b
+import parent.c
+
+[file parent/b.py]
+import parent.a
+
+[file parent/c.py]
+import parent.a
+
+[builtins fixtures/args.pyi]
+[stale]
+
+[case testIncrementalReferenceNewFileWithImportFrom]
+from parent import a
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/a.py.next]
+from parent import b
+
+[file parent/b.py.next]
+
+[stale parent, parent.a, parent.b]
+
+[case testIncrementalReferenceExistingFileWithImportFrom]
+from parent import a, b
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/b.py]
+
+[file parent/a.py.next]
+from parent import b
+
+[stale parent.a]
+
+[case testIncrementalWithTypeIgnoreOnDirectImport]
+import a, b
+
+[file a.py]
+import b  # type: ignore
+
+[file b.py]
+import c
+
+[file c.py]
+
+[stale]
+
+[case testIncrementalWithTypeIgnoreOnImportFrom]
+import a, b
+
+[file a.py]
+from b import something # type: ignore
+
+[file b.py]
+import c
+something = 3
+
+[file c.py]
+
+[stale]
+
+[case testIncrementalWithPartialTypeIgnore]
+import a  # type: ignore
+import a.b
+
+[file a/__init__.py]
+
+[file a/b.py]
+
+[stale]
+
+[case testIncrementalAnyIsDifferentFromIgnore]
+import b
+
+[file b.py]
+from typing import Any
+import a.b
+
+[file b.py.next]
+from typing import Any
+
+a = 3  # type: Any
+import a.b
+
+[file a/__init__.py]
+
+[file a/b.py]
+
+[rechecked b]
+[stale]
+[out2]
+tmp/b.py:4: error: Name 'a' already defined
+
+[case testIncrementalSilentImportsAndImportsInClass]
+# flags: --ignore-missing-imports
+class MyObject(object):
+    from bar import FooBar
+[stale]
+
+[case testIncrementalSameFileSize]
+import m
+
+[file m.py]
+def foo(a: int) -> None: pass
+def bar(a: str) -> None: pass
+
+foo(3)
+
+[file m.py.next]
+def foo(a: int) -> None: pass
+def bar(a: str) -> None: pass
+
+bar(3)
+
+[rechecked m]
+[stale]
+[out2]
+tmp/m.py:4: error: Argument 1 to "bar" has incompatible type "int"; expected "str"
+
+[case testIncrementalUnsilencingModule]
+# cmd: mypy -m main package.subpackage.mod2
+# cmd2: mypy -m main package.subpackage.mod1
+# flags: --follow-imports=skip
+
+[file main.py]
+from package.subpackage.mod1 import Class
+
+def handle(c: Class) -> None:
+    c.some_attribute
+
+[file package/__init__.py]
+# empty
+
+[file package/subpackage/__init__.py]
+# empty
+
+[file package/subpackage/mod1.py]
+import collections # Any previously unloaded package works here
+
+class Class: pass
+
+[file package/subpackage/mod2.py]
+# empty
+
+[builtins fixtures/args.pyi]
+[rechecked collections, main, package.subpackage.mod1]
+[stale collections, package.subpackage.mod1]
+[out2]
+tmp/main.py:4: error: "Class" has no attribute "some_attribute"
+
+[case testIncrementalWithIgnores]
+import foo # type: ignore
+
+[builtins fixtures/module.pyi]
+[stale]
+
+[case testIncrementalWithSilentImportsAndIgnore]
+# cmd: mypy -m main b
+# cmd2: mypy -m main c c.submodule
+# flags: --follow-imports=skip
+
+[file main.py]
+import a  # type: ignore
+import b
+import c
+
+a.A().foo()
+b.B().foo()
+c.C().foo()
+
+[file b.py]
+class B:
+    def foo(self) -> None: pass
+
+[file b.py.next]
+
+[file c/__init__.py]
+class C: pass
+
+[file c/submodule.py]
+val = 3  # type: int
+val = "foo"
+
+[builtins fixtures/module_all.pyi]
+[rechecked main, c, c.submodule]
+[stale]
+[out2]
+tmp/c/submodule.py:2: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/main.py:7: error: "C" has no attribute "foo"
+
+[case testIncrementalRemoteError]
+import m
+m.C().foo().bar()
+[file m.py]
+import n
+class C:
+  def foo(self) -> n.A: pass
+[file n.py]
+class A:
+  def bar(self): pass
+[file n.py.next]
+class A:
+  pass
+[rechecked m, n]
+[stale n]
+[out2]
+main:2: error: "A" has no attribute "bar"
+
+[case testIncrementalRemoteErrorFixed]
+import m
+m.C().foo().bar()
+[file m.py]
+import n
+class C:
+  def foo(self) -> n.A: pass
+[file n.py]
+class A:
+  pass
+[file n.py.next]
+class A:
+  def bar(self): pass
+[rechecked m, n]
+[stale n]
+[out1]
+main:2: error: "A" has no attribute "bar"
+
+[case testIncrementalChangedError]
+import m
+[file m.py]
+import n
+def accept_int(x: int) -> None: pass
+accept_int(n.foo)
+[file n.py]
+foo = "hello"
+reveal_type(foo)
+[file n.py.next]
+foo = 3.14
+reveal_type(foo)
+[rechecked m, n]
+[stale]
+[out1]
+tmp/n.py:2: error: Revealed type is 'builtins.str'
+tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+[out2]
+tmp/n.py:2: error: Revealed type is 'builtins.float'
+tmp/m.py:3: error: Argument 1 to "accept_int" has incompatible type "float"; expected "int"
+
+[case testIncrementalReplacingImports]
+import good, bad, client
+
+[file good.py]
+def foo(a: int) -> None: pass
+
+[file bad.py]
+def foo(a: str) -> None: pass
+
+[file client.py]
+import good
+import bad
+from good import foo
+foo(3)
+
+[file client.py.next]
+import good
+import bad
+from bad import foo
+foo(3)
+
+[rechecked client]
+[stale]
+[out2]
+tmp/client.py:4: error: Argument 1 to "foo" has incompatible type "int"; expected "str"
+
+[case testIncrementalChangingAlias]
+import m1, m2, m3, m4, m5
+
+[file m1.py]
+from m2 import A
+def accepts_int(x: int) -> None: pass
+accepts_int(A())
+
+[file m2.py]
+from m3 import A
+
+[file m3.py]
+from m4 import B
+A = B
+
+[file m3.py.next]
+from m5 import C
+A = C
+
+[file m4.py]
+def B() -> int:
+    return 42
+
+[file m5.py]
+def C() -> str:
+    return "hello"
+
+[rechecked m1, m2, m3]
+[stale m3]
+[out2]
+tmp/m1.py:3: error: Argument 1 to "accepts_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalSilentImportsWithBlatantError]
+# cmd: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from evil import Hello
+
+[file main.py.next]
+from evil import Hello
+reveal_type(Hello())
+
+[file evil.py]
+def accept_int(x: int) -> None: pass
+accept_int("not an int")
+
+[rechecked main]
+[stale]
+[out2]
+tmp/main.py:2: error: Revealed type is 'Any'
+
+[case testIncrementalImportIsNewlySilenced]
+# cmd: mypy -m main foo
+# cmd2: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from foo import bar
+def accept_int(x: int) -> None: pass
+accept_int(bar)
+
+[file foo.py]
+bar = 3
+
+[file foo.py.next]
+# Empty!
+
+[rechecked main]
+[stale main]
+
+[case testIncrementalSilencedModuleNoLongerCausesError]
+# cmd: mypy -m main evil
+# cmd2: mypy -m main
+# flags: --follow-imports=skip
+
+[file main.py]
+from evil import bar
+def accept_int(x: int) -> None: pass
+accept_int(bar)
+reveal_type(bar)
+
+[file evil.py]
+bar = "str"
+
+[rechecked main]
+[stale]
+[out1]
+tmp/main.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+tmp/main.py:4: error: Revealed type is 'builtins.str'
+[out2]
+tmp/main.py:4: error: Revealed type is 'Any'
+
+[case testIncrementalFixedBugCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod3.py.next]
+from mod4 import C
+class B:
+    def makeC(self) -> C: return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[rechecked mod3, mod2, mod1]
+[stale mod3, mod2]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[case testIncrementalIncidentalChangeWithBugCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[file mod4.py.next]
+class C:
+    def foo(self) -> str: return 'a'
+
+[rechecked mod4, mod3, mod2, mod1]
+[stale mod4]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalIncidentalChangeWithBugFixCausesPropagation]
+import mod1
+
+[file mod1.py]
+from mod2 import A
+val = A().makeB().makeC().foo()
+reveal_type(val)
+
+[file mod2.py]
+from mod3 import B
+class A:
+    def makeB(self) -> B: return B()
+
+[file mod3.py]
+from mod4 import C
+class B:
+    def makeC(self) -> C:
+        val = 3  # type: int
+        val = "str"   # deliberately triggering error
+        return C()
+
+[file mod3.py.next]
+from mod4 import C
+class B:
+    def makeC(self) -> C: return C()
+
+[file mod4.py]
+class C:
+    def foo(self) -> int: return 1
+
+[file mod4.py.next]
+class C:
+    def foo(self) -> str: return 'a'
+
+[rechecked mod4, mod3, mod2, mod1]
+[stale mod4, mod3, mod2]
+[out1]
+tmp/mod3.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/mod1.py:3: error: Revealed type is 'builtins.int'
+
+[out2]
+tmp/mod1.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalSilentImportsWithInnerImports]
+# cmd: mypy -m main foo
+# flags: --ignore-missing-imports
+
+[file main.py]
+from foo import MyClass
+m = MyClass()
+
+[file main.py.next]
+from foo import MyClass
+m = MyClass()
+reveal_type(m.val)
+
+[file foo.py]
+class MyClass:
+    def __init__(self) -> None:
+        import unrelated
+        self.val = unrelated.test()
+
+[rechecked main]
+[stale]
+[out2]
+tmp/main.py:3: error: Revealed type is 'Any'
+
+[case testIncrementalSilentImportsWithInnerImportsAndNewFile]
+# cmd: mypy -m main foo
+# cmd2: mypy -m main foo unrelated
+# flags: --follow-imports=skip
+
+[file main.py]
+from foo import MyClass
+m = MyClass()
+
+[file main.py.next]
+from foo import MyClass
+m = MyClass()
+reveal_type(m.val)
+
+[file foo.py]
+class MyClass:
+    def __init__(self) -> None:
+        import unrelated
+        self.val = unrelated.test()
+
+[file unrelated.py]
+def test() -> str: return "foo"
+
+[rechecked main, foo, unrelated]
+[stale foo, unrelated]
+[out2]
+tmp/main.py:3: error: Revealed type is 'builtins.str'
+
+[case testIncrementalWorksWithNestedClasses]
+import foo
+
+[file foo.py]
+class MyClass:
+    class NestedClass:
+        pass
+
+    class_attr = NestedClass()
+
+[rechecked]
+[stale]
+
+[case testIncrementalWorksWithNamedTuple]
+import foo
+
+[file foo.py]
+from mid import MyTuple
+def accept_int(x: int) -> None: pass
+accept_int(MyTuple(1, "b", "c").a)
+
+[file mid.py]
+from bar import MyTuple
+
+[file bar.py]
+from typing import NamedTuple
+MyTuple = NamedTuple('MyTuple', [
+    ('a', int),
+    ('b', str),
+    ('c', str)
+])
+
+[file bar.py.next]
+from typing import NamedTuple
+MyTuple = NamedTuple('MyTuple', [
+    ('b', int),  # a and b are swapped
+    ('a', str),
+    ('c', str)
+])
+
+[rechecked bar, mid, foo]
+[stale bar]
+[out2]
+tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalWorksWithNestedNamedTuple]
+import foo
+
+[file foo.py]
+from mid import Outer
+def accept_int(x: int) -> None: pass
+accept_int(Outer.MyTuple(1, "b", "c").a)
+
+[file mid.py]
+from bar import Outer
+
+[file bar.py]
+from typing import NamedTuple
+class Outer:
+    MyTuple = NamedTuple('MyTuple', [
+        ('a', int),
+        ('b', str),
+        ('c', str)
+    ])
+
+[file bar.py.next]
+from typing import NamedTuple
+class Outer:
+    MyTuple = NamedTuple('MyTuple', [
+        ('b', int),  # a and b are swapped
+        ('a', str),
+        ('c', str)
+    ])
+
+[rechecked bar, mid, foo]
+[stale bar]
+[out2]
+tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int"
+
+[case testIncrementalPartialSubmoduleUpdate]
+# cmd: mypy -m a
+# cmd2: mypy -m a a.c
+# flags: --follow-imports=skip
+
+[file a/__init__.py]
+from .b import B
+from .c import C
+
+[file a/b.py]
+class B: pass
+
+[file a/c.py]
+class C: pass
+
+[file a/c.py.next]
+class C: pass
+pass
+
+[rechecked a, a.c]
+[stale a, a.c]
+[out]
+
+[case testIncrementalNestedClassRef]
+import top
+
+[file top.py]
+from funcs import callee
+from classes import Outer
+def caller(a: Outer.Inner) -> None:
+    callee(a)
+
+[file funcs.py]
+from classes import Outer
+def callee(a: Outer.Inner) -> None:
+    pass
+
+[file classes.py]
+class Outer:
+    class Inner:
+        pass
+
+[file top.py.next]
+from funcs import callee
+from classes import Outer
+def caller(a: Outer.Inner) -> int:
+    callee(a)
+
+[case testIncrementalLoadsParentAfterChild]
+# cmd: mypy -m r.s
+
+[file r/__init__.py]
+from . import s
+
+[file r/m.py]
+class R: pass
+
+[file r/s.py]
+from . import m
+R = m.R
+a = None  # type: R
+
+[file r/s.py.next]
+from . import m
+R = m.R
+a = None  # type: R
+
+[case testIncrementalBaseClassAttributeConflict]
+class A: pass
+class B: pass
+
+class X:
+    attr = None  # type: A
+class Y:
+    attr = None  # type: B
+class Z(X, Y): pass
+[stale]
+[out]
+main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
+[out2]
+main:8: error: Definition of "attr" in base class "X" is incompatible with definition in base class "Y"
+
+[case testIncrementalFollowImportsSilent]
+# flags: --follow-imports=silent
+import a
+[file a.py]
+x = 0
+[file a.py.next]
+x = 0
+x + ''
+
+[case testIncrementalFollowImportsSkip]
+# flags: --follow-imports=skip
+import a
+reveal_type(a.x)
+[file a.py]
+/
+[file a.py.next]
+//
+[out]
+main:3: error: Revealed type is 'Any'
+[out2]
+main:3: error: Revealed type is 'Any'
+
+[case testIncrementalFollowImportsError]
+# flags: --follow-imports=error
+import a
+[file a.py]
+/
+[file a.py.next]
+//
+[out1]
+main:2: note: Import of 'a' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+[out2]
+main:2: note: Import of 'a' ignored
+main:2: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testIncrementalFollowImportsVariable]
+# flags: --config-file tmp/mypy.ini
+import a
+reveal_type(a.x)
+[file a.py]
+x = 0
+[file mypy.ini]
+[[mypy]
+follow_imports = normal
+[file mypy.ini.next]
+[[mypy]
+follow_imports = skip
+[out1]
+main:3: error: Revealed type is 'builtins.int'
+[out2]
+main:3: error: Revealed type is 'Any'
+
+[case testIncrementalNamedTupleInMethod]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def f(self) -> None:
+        A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalNamedTupleInMethod2]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    class D:
+        def f(self) -> None:
+            A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalNamedTupleInMethod3]
+from ntcrash import nope
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def a(self):
+        class D:
+            def f(self) -> None:
+                A = NamedTuple('A', [('x', int), ('y', int)])
+[out1]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'ntcrash' has no attribute 'nope'
+
+[case testIncrementalNamedTupleInMethod4]
+from ntcrash import C
+reveal_type(C().a)
+reveal_type(C().b)
+reveal_type(C().c)
+[file ntcrash.py]
+from typing import NamedTuple
+class C:
+    def __init__(self) -> None:
+        A = NamedTuple('A', [('x', int)])
+        self.a = A(0)
+        self.b = A(0)  # type: A
+        self.c = A
+[out1]
+main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+[out2]
+main:2: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+main:3: error: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+main:4: error: Revealed type is 'def (x: builtins.int) -> Tuple[builtins.int, fallback=ntcrash.C.A at 4]'
+
+[case testIncrementalTypedDictInMethod]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def f(self) -> None:
+        A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod2]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    class D:
+        def f(self) -> None:
+            A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod3]
+from tdcrash import nope
+[file tdcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def a(self):
+        class D:
+            def f(self) -> None:
+                A = TypedDict('A', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out1]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+[out2]
+main:1: error: Module 'tdcrash' has no attribute 'nope'
+
+[case testIncrementalTypedDictInMethod4]
+from ntcrash import C
+reveal_type(C().a)
+reveal_type(C().b)
+reveal_type(C().c)
+[file ntcrash.py]
+from mypy_extensions import TypedDict
+class C:
+    def __init__(self) -> None:
+        A = TypedDict('A', {'x': int})
+        self.a = A(x=0)
+        self.b = A(x=0)  # type: A
+        self.c = A
+[builtins fixtures/dict.pyi]
+[out1]
+main:2: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+main:3: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=ntcrash.C.A at 4)'
+main:4: error: Revealed type is 'def () -> ntcrash.C.A at 4'
+[out2]
+main:2: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+main:3: error: Revealed type is 'TypedDict(x=builtins.int, _fallback=ntcrash.C.A at 4)'
+main:4: error: Revealed type is 'def () -> ntcrash.C.A at 4'
+
+[case testIncrementalPerFileFlags]
+# flags: --config-file tmp/mypy.ini
+import a
+[file a.py]
+pass
+[file mypy.ini]
+[[mypy]
+warn_no_return = False
+[[mypy-a]
+warn_no_return = True
+[rechecked]
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
new file mode 100644
index 0000000..f93429e
--- /dev/null
+++ b/test-data/unit/check-inference-context.test
@@ -0,0 +1,880 @@
+
+
+-- Basic test cases
+-- ----------------
+
+
+[case testBasicContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+
+ao = f()
+ab = f()
+b = f() # E: Incompatible types in assignment (expression has type A[None], variable has type "B")
+
+def f() -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testBasicContextInferenceForConstructor]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+
+ao = A()
+ab = A()
+b = A() # E: Incompatible types in assignment (expression has type A[None], variable has type "B")
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testIncompatibleContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+b = None # type: B
+c = None # type: C
+ab = None # type: A[B]
+ao = None # type: A[object]
+ac = None # type: A[C]
+
+ac = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+ab = f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "B"
+
+ao = f(b)
+ab = f(b)
+ao = f(c)
+ac = f(c)
+
+def f(a: T) -> 'A[T]':
+    pass
+
+class A(Generic[T]): pass
+
+class B: pass
+class C: pass
+
+
+-- Local variables
+-- ---------------
+
+
+[case testInferGenericLocalVariableTypeWithEmptyContext]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    o = None # type: object
+    b = None # type: B
+
+    x = f(o)
+    ab = x # E: Incompatible types in assignment (expression has type A[object], variable has type A[B])
+    ao = x
+    y = f(b)
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[out]
+
+[case testInferLocalVariableTypeWithUnderspecifiedGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    x = f() # E: Need type annotation for variable
+
+def f() -> 'A[T]': pass
+class A(Generic[T]): pass
+[out]
+
+[case testInferMultipleLocalVariableTypesWithTupleRvalue]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def g() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    b = None # type: B
+    x, y = f(b), f(b)
+    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = x
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+class A(Generic[T]): pass
+class B: pass
+[out]
+
+[case testInferMultipleLocalVariableTypesWithArrayRvalueAndNesting]
+from typing import TypeVar, List, Generic
+T = TypeVar('T')
+def h() -> None:
+    ao = None # type: A[object]
+    ab = None # type: A[B]
+    b = None # type: B
+    x, y = g(f(b))
+    ao = x # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ao = y # E: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+    ab = x
+    ab = y
+
+def f(a: T) -> 'A[T]': pass
+def g(a: T) -> List[T]: pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Return types with multiple tvar instances
+-- -----------------------------------------
+
+
+[case testInferenceWithTypeVariableTwiceInReturnType]
+from typing import TypeVar, Tuple, Generic
+T = TypeVar('T')
+b = None # type: B
+o = None # type: object
+ab = None # type: A[B]
+ao = None # type: A[object]
+
+ab, ao = f(b) # Fail
+ao, ab = f(b) # Fail
+
+ao, ao = f(b)
+ab, ab = f(b)
+ao, ao = f(o)
+
+def f(a: T) -> 'Tuple[A[T], A[T]]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:8: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+
+[case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables]
+from typing import TypeVar, Tuple, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+b = None # type: B
+o = None # type: object
+ab = None # type: A[B]
+ao = None # type: A[object]
+
+ao, ao, ab = f(b, b)     # Fail
+ao, ab, ao = g(b, b)     # Fail
+ao, ab, ab, ab = h(b, b) # Fail
+ab, ab, ao, ab = h(b, b) # Fail
+
+ao, ab, ab = f(b, b)
+ab, ab, ao = g(b, b)
+ab, ab, ab, ab = h(b, b)
+
+def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass
+def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass
+def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass
+
+class A(Generic[T]): pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:10: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:11: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+main:12: error: Incompatible types in assignment (expression has type A[B], variable has type A[object])
+
+
+-- Multiple tvar instances in arguments
+-- ------------------------------------
+
+
+[case testMultipleTvatInstancesInArgs]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ac = None # type: A[C]
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+c = None # type: C
+o = None # type: object
+
+ab = f(b, o) # E: Argument 2 to "f" has incompatible type "object"; expected "B"
+ab = f(o, b) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+ac = f(b, c) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+ac = f(c, b) # E: Argument 2 to "f" has incompatible type "B"; expected "C"
+
+ao = f(b, c)
+ao = f(c, b)
+ab = f(c, b)
+
+def f(a: T, b: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+
+
+-- Nested generic function calls
+-- -----------------------------
+
+
+[case testNestedGenericFunctionCall1]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+aab = None # type: A[A[B]]
+aao = None # type: A[A[object]]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+aab = f(f(o)) # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+
+aab = f(f(b))
+aao = f(f(b))
+ao = f(f(b))
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testNestedGenericFunctionCall2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+ab = f(g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+
+ab = f(g(b))
+ao = f(g(b))
+
+def f(a: T) -> T: pass
+
+def g(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testNestedGenericFunctionCall3]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ab = None # type: A[B]
+ao = None # type: A[object]
+b = None # type: B
+o = None # type: object
+
+ab = f(g(o), g(b)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+ab = f(g(b), g(o)) # E: Argument 1 to "g" has incompatible type "object"; expected "B"
+
+ab = f(g(b), g(b))
+ao = f(g(b), g(o))
+ao = f(g(o), g(b))
+
+def f(a: T, b: T) -> T:
+    pass
+
+def g(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+
+
+-- Method calls
+-- ------------
+
+
+[case testMethodCallWithContextInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+o = None # type: object
+b = None # type: B
+c = None # type: C
+ao = None # type: A[object]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ab.g(f(o))        # E: Argument 1 to "f" has incompatible type "object"; expected "B"
+ac = f(b).g(f(c)) # E: Incompatible types in assignment (expression has type A[B], variable has type A[C])
+ac = f(c).g(f(b)) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+
+ab = f(b).g(f(c))
+ab.g(f(c))
+
+def f(a: T) -> 'A[T]': pass
+
+class A(Generic[T]):
+    def g(self, a: 'A[T]') -> 'A[T]': pass
+
+class B: pass
+class C(B): pass
+
+
+-- List expressions
+-- ----------------
+
+
+[case testEmptyListExpression]
+from typing import List
+aa = None # type: List[A]
+ao = None # type: List[object]
+a = None # type: A
+
+a = [] # E: Incompatible types in assignment (expression has type List[None], variable has type "A")
+
+aa = []
+ao = []
+
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testSingleItemListExpressions]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+ao = None # type: List[object]
+a = None # type: A
+b = None # type: B
+
+aa = [b] # E: List item 0 has incompatible type "B"
+ab = [a] # E: List item 0 has incompatible type "A"
+
+aa = [a]
+ab = [b]
+ao = [a]
+aa = [None]
+ao = [None]
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testMultiItemListExpressions]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+ao = None # type: List[object]
+a = None # type: A
+b = None # type: B
+
+ab = [b, a] # E: List item 1 has incompatible type "A"
+ab = [a, b] # E: List item 0 has incompatible type "A"
+
+aa = [a, b, a]
+ao = [a, b]
+
+class A: pass
+class B(A): pass
+[builtins fixtures/list.pyi]
+
+[case testLocalVariableInferenceFromEmptyList]
+import typing
+def f() -> None:
+    a = []     # E: Need type annotation for variable
+    b = [None]  # E: Need type annotation for variable
+    c = [B()]
+    c = [object()] # E: List item 0 has incompatible type "object"
+    c = [B()]
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNestedListExpressions]
+from typing import List
+aao = None # type: List[List[object]]
+aab = None # type: List[List[B]]
+ab = None # type: List[B]
+b = None # type: B
+o = None # type: object
+
+aao = [[o], ab] # E: List item 1 has incompatible type List[B]
+aab = [[], [o]] # E: List item 0 has incompatible type "object"
+
+aao = [[None], [b], [], [o]]
+aab = [[None], [b], []]
+aab = [ab, []]
+
+class B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Complex context
+-- ---------------
+
+
+[case testParenthesesAndContext]
+from typing import List
+l = ([A()]) # type: List[object]
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testComplexTypeInferenceWithTuple]
+from typing import TypeVar, Tuple, Generic
+k = TypeVar('k')
+t = TypeVar('t')
+v = TypeVar('v')
+def f(x: Tuple[k]) -> 'A[k]': pass
+
+d = f((A(),)) # type: A[A[B]]
+
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+class D(Generic[k, v]): pass
+[builtins fixtures/list.pyi]
+
+
+-- Dictionary literals
+-- -------------------
+
+
+[case testDictionaryLiteralInContext]
+from typing import Dict, TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+a_b = A() # type: A[B]
+a_c = A() # type: A[C]
+d = {A() : a_c,
+     a_b : A()} # type: Dict[A[B], A[C]]
+[builtins fixtures/dict.pyi]
+
+
+-- Special cases (regression tests etc.)
+-- -------------------------------------
+
+
+[case testInitializationWithInferredGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected C[A]
+
+def f(x: T) -> T: pass
+class C(Generic[T]): pass
+class A: pass
+
+[case testInferredGenericTypeAsReturnValue]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def t() -> 'A[B]':
+    return f(D()) # E: Argument 1 to "f" has incompatible type "D"; expected "B"
+    return A()
+    return f(C())
+
+def f(a: T) -> 'A[T]': pass
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+class D: pass
+[out]
+
+[case testIntersectionWithInferredGenericArgument]
+from typing import overload, TypeVar, Generic
+T = TypeVar('T')
+f(A())
+
+ at overload
+def f(x: 'A[B]') -> None: pass
+ at overload
+def f(x: 'B') -> None: pass
+class A(Generic[T]): pass
+class B: pass
+
+[case testInferenceWithAbstractClassContext]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+x = A() # type: I[int]
+a_object = A() # type: A[object]
+y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type A[object], variable has type I[int])
+
+class I(Generic[t]):
+    @abstractmethod
+    def f(self): pass
+class A(I[t], Generic[t]):
+    def f(self): pass
+
+[case testInferenceWithAbstractClassContext2]
+from typing import TypeVar, Generic
+from abc import abstractmethod, ABCMeta
+t = TypeVar('t')
+a = f(A()) # type: A[int]
+a_int = A() # type: A[int]
+aa = f(a_int)
+class I(Generic[t]): pass
+class A(I[t], Generic[t]): pass
+def f(i: I[t]) -> A[t]: pass
+
+[case testInferenceWithAbstractClassContext3]
+from typing import TypeVar, Generic, Iterable
+t = TypeVar('t')
+class set(Generic[t]):
+    def __init__(self, iterable: Iterable[t]) -> None: pass
+b = bool()
+l = set([b])
+l = set([object()]) # E: List item 0 has incompatible type "object"
+[builtins fixtures/for.pyi]
+
+
+-- Infer generic type in 'Any' context
+-- -----------------------------------
+
+
+[case testInferGenericTypeInAnyContext]
+from typing import Any, TypeVar, Generic
+s = TypeVar('s')
+t = TypeVar('t')
+x = [] # type: Any
+y = C() # type: Any
+class C(Generic[s, t]): pass
+[builtins fixtures/list.pyi]
+
+
+-- Lambdas
+-- -------
+
+
+[case testInferLambdaArgumentTypeUsingContext]
+from typing import Callable
+f = None # type: Callable[[B], A]
+f = lambda x: x.o
+f = lambda x: x.x # E: "B" has no attribute "x"
+class A: pass
+class B:
+  o = None # type: A
+
+[case testInferLambdaReturnTypeUsingContext]
+from typing import List, Callable
+f = None # type: Callable[[], List[A]]
+f = lambda: []
+f = lambda: [B()]  # E: List item 0 has incompatible type "B"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testInferLambdaAsGenericFunctionArgument]
+from typing import TypeVar, List, Any, Callable
+t = TypeVar('t')
+class A:
+  x = None # type: A
+def f(a: List[t], fn: Callable[[t], Any]) -> None: pass
+list_a = [] # type: List[A]
+f(list_a, lambda a: a.x)
+[builtins fixtures/list.pyi]
+
+[case testLambdaWithoutContext]
+reveal_type(lambda x: x)  # E: Revealed type is 'def (x: Any) -> Any'
+reveal_type(lambda x: 1)  # E: Revealed type is 'def (x: Any) -> builtins.int'
+
+[case testLambdaContextVararg]
+from typing import Callable
+def f(t: Callable[[str], str]) -> str: ''
+f(lambda *_: '')
+
+[case testInvalidContextForLambda]
+from typing import Callable
+f = lambda x: A() # type: Callable[[], A]
+f2 = lambda: A() # type: Callable[[A], A]
+class A: pass
+[out]
+main:2: error: Incompatible types in assignment (expression has type Callable[[Any], A], variable has type Callable[[], A])
+main:2: error: Cannot infer type of lambda
+main:3: error: Incompatible types in assignment (expression has type Callable[[], A], variable has type Callable[[A], A])
+main:3: error: Cannot infer type of lambda
+
+[case testEllipsisContextForLambda]
+from typing import Callable
+f1 = lambda x: 1 # type: Callable[..., int]
+f2 = lambda: 1 # type: Callable[..., int]
+f3 = lambda *args, **kwargs: 1 # type: Callable[..., int]
+f4 = lambda x: x # type: Callable[..., int]
+g = lambda x: 1 # type: Callable[..., str]
+[builtins fixtures/dict.pyi]
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[Any], int], variable has type Callable[..., str])
+main:6: error: Incompatible return value type (got "int", expected "str")
+
+[case testEllipsisContextForLambda2]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+def foo(arg: Callable[..., T]) -> None: pass
+foo(lambda: 1)
+
+[case testLambdaNoneInContext]
+from typing import Callable
+def f(x: Callable[[], None]) -> None: pass
+def g(x: Callable[[], int]) -> None: pass
+f(lambda: None)
+g(lambda: None)
+
+[case testIsinstanceInInferredLambda]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+S = TypeVar('S')
+class A: pass
+class B(A): pass
+class C(A): pass
+def f(func: Callable[[T], S], *z: T, r: S = None) -> S: pass
+f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f"
+f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable
+f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable
+f( # E: Argument 1 to "f" has incompatible type Callable[[A], A]; expected Callable[[A], B]
+    lambda x: B() if isinstance(x, B) else x, # E: Incompatible return value type (got "A", expected "B")
+    A(), r=B())
+[builtins fixtures/isinstance.pyi]
+
+
+-- Overloads + generic functions
+-- -----------------------------
+
+
+[case testMapWithOverloadedFunc]
+from typing import TypeVar, Callable, List, overload, Any
+t = TypeVar('t')
+s = TypeVar('s')
+def map(f: Callable[[t], s], seq: List[t]) -> List[s]: pass
+
+ at overload
+def g(o: object) -> 'B': pass
+ at overload
+def g(o: 'A', x: Any = None) -> 'B': pass
+
+class A: pass
+class B: pass
+
+m = map(g, [A()])
+b = m # type: List[B]
+a = m # type: List[A] # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+
+-- Boolean operators
+-- -----------------
+
+
+[case testOrOperationInferredFromContext]
+from typing import List
+a, b, c = None, None, None # type: (List[A], List[B], List[C])
+a = a or []
+a = [] or a
+b = b or [C()]
+a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type List[A])
+b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type List[B])
+
+class A: pass
+class B: pass
+class C(B): pass
+[builtins fixtures/list.pyi]
+
+
+-- Special cases
+-- -------------
+
+
+[case testSomeTypeVarsInferredFromContext]
+from typing import List, TypeVar
+t = TypeVar('t')
+s = TypeVar('s')
+# Some type variables can be inferred using context, but not all of them.
+a = None # type: List[A]
+a = f(A(), B())
+a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+def f(a: s, b: t) -> List[s]: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testSomeTypeVarsInferredFromContext2]
+from typing import List, TypeVar
+s = TypeVar('s')
+t = TypeVar('t')
+# Like testSomeTypeVarsInferredFromContext, but tvars in different order.
+a = None # type: List[A]
+a = f(A(), B())
+a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+def f(a: s, b: t) -> List[s]: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testLambdaInListAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+map(
+  [lambda x: x], [])
+def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testChainedAssignmentInferenceContexts]
+from typing import List
+i = None # type: List[int]
+s = None # type: List[str]
+i = i = []
+i = s = [] # E: Incompatible types in assignment (expression has type List[str], variable has type List[int])
+[builtins fixtures/list.pyi]
+
+[case testContextForAttributeDeclaredInInit]
+from typing import List
+class A:
+  def __init__(self):
+    self.x = [] # type: List[int]
+a = A()
+a.x = []
+a.x = [1]
+a.x = [''] # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+[case testListMultiplyInContext]
+from typing import List
+a = None  # type: List[int]
+a = [None] * 3
+a = [''] * 3 # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+
+[case testUnionTypeContext]
+from typing import Union, List, TypeVar
+T = TypeVar('T')
+def f(x: Union[List[T], str]) -> None: pass
+f([1])
+f('')
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[None], str]"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIgnoringInferenceContext]
+from typing import TypeVar, List
+T = TypeVar('T')
+def f(x: List[T]) -> T: pass
+def g(y: object) -> None: pass
+a = [1]
+g(f(a))
+[builtins fixtures/list.pyi]
+
+[case testStar2Context]
+from typing import Any, Dict, Tuple, Iterable
+def f1(iterable: Iterable[Tuple[str, Any]] = None) -> None:
+    f2(**dict(iterable))
+def f2(iterable: Iterable[Tuple[str, Any]], **kw: Any) -> None:
+    pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferenceInGenericFunction]
+from typing import TypeVar, List
+T = TypeVar('T')
+def f(a: T) -> None:
+    l = []  # type: List[T]
+    l.append(a)
+    l.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "T"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferenceInGenericClass]
+from typing import TypeVar, Generic, List
+S = TypeVar('S')
+T = TypeVar('T')
+class A(Generic[S]):
+    def f(self, a: T, b: S) -> None:
+        l = []  # type: List[T]
+        l.append(a)
+        l.append(b) # E: Argument 1 to "append" of "list" has incompatible type "S"; expected "T"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testLambdaInGenericFunction]
+from typing import TypeVar, Callable
+T = TypeVar('T')
+S = TypeVar('S')
+def f(a: T, b: S) -> None:
+    c = lambda x: x  # type: Callable[[T], S]
+[out]
+main:5: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:5: error: Incompatible return value type (got "T", expected "S")
+
+[case testLambdaInGenericClass]
+from typing import TypeVar, Callable, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T]):
+    def f(self, b: S) -> None:
+        c = lambda x: x  # type: Callable[[T], S]
+[out]
+main:6: error: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[T], S])
+main:6: error: Incompatible return value type (got "T", expected "S")
+
+[case testRevealTypeContext]
+from typing import TypeVar, Callable, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+    pass
+reveal_type(A()) # E: Revealed type is '__main__.A[builtins.None]'
+b = reveal_type(A())  # type: A[int] # E: Revealed type is '__main__.A[builtins.int]'
+
+[case testUnionWithGenericTypeItemContext]
+from typing import TypeVar, Union, List
+
+T = TypeVar('T')
+
+def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
+reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
+[builtins fixtures/list.pyi]
+
+[case testUnionWithGenericTypeItemContextAndStrictOptional]
+# flags: --strict-optional
+from typing import TypeVar, Union, List
+
+T = TypeVar('T')
+
+def f(x: Union[T, List[int]]) -> Union[T, List[int]]: pass
+reveal_type(f(1)) # E: Revealed type is 'Union[builtins.int*, builtins.list[builtins.int]]'
+reveal_type(f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(f(None)) # E: Revealed type is 'Union[builtins.None, builtins.list[builtins.int]]'
+[builtins fixtures/list.pyi]
+
+[case testUnionWithGenericTypeItemContextInMethod]
+from typing import TypeVar, Union, List, Generic
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class C(Generic[T]):
+    def f(self, x: Union[T, S]) -> Union[T, S]: pass
+
+c = C[List[int]]()
+reveal_type(c.f('')) # E: Revealed type is 'Union[builtins.list[builtins.int], builtins.str*]'
+reveal_type(c.f([1])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(c.f([])) # E: Revealed type is 'builtins.list[builtins.int]'
+reveal_type(c.f(None)) # E: Revealed type is 'builtins.list[builtins.int]'
+[builtins fixtures/list.pyi]
+
+[case testGenericMethodCalledInGenericContext]
+from typing import TypeVar, Generic
+
+_KT = TypeVar('_KT')
+_VT = TypeVar('_VT')
+_T = TypeVar('_T')
+
+class M(Generic[_KT, _VT]):
+    def get(self, k: _KT, default: _T) -> _T: ...
+
+def f(d: M[_KT, _VT], k: _KT) -> _VT:
+    return d.get(k, None)
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
new file mode 100644
index 0000000..799fb35
--- /dev/null
+++ b/test-data/unit/check-inference.test
@@ -0,0 +1,1765 @@
+-- Inferring locals/globals with simple types
+-- ------------------------------------------
+
+
+[case testInferSimpleGvarType]
+import typing
+x = A()
+y = B()
+x = B() # Fail
+x = A()
+x = y   # Fail
+x = x
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testInferSimpleLvarType]
+import typing
+def f() -> None:
+  x = A()
+  y = B()
+  x = B() # Fail
+  x = A()
+  x = y   # Fail
+  x = x
+class A: pass
+class B: pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testLvarInitializedToVoid]
+import typing
+def f() -> None:
+    a = g()    # E: "g" does not return a value
+    #b, c = g() # "g" does not return a value TODO
+
+def g() -> None: pass
+[out]
+
+[case testInferringLvarTypeFromArgument]
+import typing
+def f(a: 'A') -> None:
+    b = a
+    b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = a
+    a = b
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypeFromGvar]
+
+g = None # type: B
+
+def f() -> None:
+    a = g
+    a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringImplicitDynamicTypeForLvar]
+import typing
+def f() -> None:
+    a = g()
+    None(a) # E: None not callable
+    a.x()
+
+def g(): pass
+[out]
+
+[case testInferringExplicitDynamicTypeForLvar]
+from typing import Any
+g = None # type: Any
+
+def f(a: Any) -> None:
+    b = g
+    None(b) # E: None not callable
+    a.x()
+[out]
+
+
+-- Inferring types of local variables with complex types
+-- -----------------------------------------------------
+
+
+[case testInferringTupleTypeForLvar]
+
+def f() -> None:
+    a = A(), B()
+    aa = None # type: A
+    bb = None # type: B
+    bb = a[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    aa = a[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    aa = a[0]
+    bb = a[1]
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringTupleTypeForLvarWithNones]
+import typing
+def f() -> None:
+    a = A(), None # E: Need type annotation for variable
+    b = None, A() # E: Need type annotation for variable
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringGenericTypeForLvar]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+a_i = None # type: A[int]
+a_s = None # type: A[str]
+
+def f() -> None:
+    a_int = A() # type: A[int]
+    a = a_int
+    a = a_s # E: Incompatible types in assignment (expression has type A[str], variable has type A[int])
+    a = a_i
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testInferringFunctionTypeForLvar]
+import typing
+def f() -> None:
+    a = g
+    a(B()) # E: Argument 1 has incompatible type "B"; expected "A"
+    a(A())
+
+def g(a: 'A') -> None: pass
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringFunctionTypeForLvarFromTypeObject]
+import typing
+def f() -> None:
+    a = A
+    a(A()) # E: Too many arguments
+    a()
+    t = a # type: type
+
+class A: pass
+[out]
+
+
+-- Inferring variable types in multiple definition
+-- -----------------------------------------------
+
+
+[case testInferringLvarTypesInMultiDef]
+import typing
+def f() -> None:
+    a, b = A(), B()
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInTupleAssignment]
+from typing import Tuple
+def f() -> None:
+    t = None # type: Tuple[A, B]
+    a, b = t
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignment1]
+from typing import Tuple
+def f() -> None:
+    t = None # type: Tuple[A, B]
+    a1, (a, b) = A(), t
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+    a = A()
+    b = B()
+
+class A: pass
+class B: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignment2]
+import typing
+def f() -> None:
+    a, (b, c) = A(), (B(), C())
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+
+    a = A()
+    b = B()
+    c = C()
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+
+[case testInferringLvarTypesInNestedListAssignment]
+import typing
+def f() -> None:
+    a, (b, c) = A(), [B(), C()]
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    c = A() # E: Incompatible types in assignment (expression has type "A", variable has type "C")
+
+    a = A()
+    b = B()
+    c = C()
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+
+[case testInferringLvarTypesInMultiDefWithNoneTypes]
+import typing
+def f() -> None:
+    a, b = A(), None # E: Need type annotation for variable
+    c, d = None, A() # E: Need type annotation for variable
+
+class A: pass
+[out]
+
+[case testInferringLvarTypesInNestedTupleAssignmentWithNoneTypes]
+import typing
+def f() -> None:
+    a1, (a2, b) = A(), (A(), None) # E: Need type annotation for variable
+
+class A: pass
+[out]
+
+[case testInferringLvarTypesInMultiDefWithInvalidTuple]
+from typing import Tuple
+t = None # type: Tuple[object, object, object]
+
+def f() -> None:
+    a, b = t         # Fail
+    c, d, e, f = t   # Fail
+    g, h, i = t
+[builtins fixtures/tuple.pyi]
+[out]
+main:5: error: Too many values to unpack (2 expected, 3 provided)
+main:6: error: Need more than 3 values to unpack (4 expected)
+
+[case testInvalidRvalueTypeInInferredMultipleLvarDefinition]
+import typing
+def f() -> None:
+    a, b = f   # E: 'def ()' object is not iterable
+    c, d = A() # E: '__main__.A' object is not iterable
+class A: pass
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInvalidRvalueTypeInInferredNestedTupleAssignment]
+import typing
+def f() -> None:
+    a1, (a2, b) = A(), f   # E: 'def ()' object is not iterable
+    a3, (c, d) = A(), A() # E: '__main__.A' object is not iterable
+class A: pass
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringMultipleLvarDefinitionWithListRvalue]
+from typing import List
+
+class C: pass
+class D: pass
+
+def f() -> None:
+    list_c = [C()]
+    list_d = [D()]
+    a, b = list_c
+    c, d, e = list_d
+    a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+    b = c   # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+    a = C()
+    b = C()
+    c = D()
+    d = D()
+    e = D()
+
+    a = b
+    c = d
+    d = e
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringNestedTupleAssignmentWithListRvalue]
+from typing import List
+
+class C: pass
+class D: pass
+
+def f() -> None:
+    list_c = [C()]
+    list_d = [D()]
+    c1, (a, b) = C(), list_c
+    c2, (c, d, e) = C(), list_d
+    a = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    b = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+    c = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D")
+    b = c   # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+    a = C()
+    b = C()
+    c = D()
+    d = D()
+    e = D()
+
+    a = b
+    c = d
+    d = e
+[builtins fixtures/for.pyi]
+[out]
+
+[case testInferringMultipleLvarDefinitionWithImplicitDynamicRvalue]
+import typing
+def f() -> None:
+    a, b = g()
+    a.x
+    b.x
+def g(): pass
+
+[case testInferringMultipleLvarDefinitionWithExplicitDynamicRvalue]
+from typing import Any
+def f(d: Any) -> None:
+    a, b = d
+    a.x
+    b.x
+
+[case testInferringTypesFromIterable]
+from typing import Iterable
+class Nums(Iterable[int]):
+    def __iter__(self): pass
+    def __next__(self): pass
+a, b = Nums()
+a = b = 1
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/for.pyi]
+
+
+-- Type variable inference for generic functions
+-- ---------------------------------------------
+
+
+[case testInferSimpleGenericFunction]
+from typing import Tuple, TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+c = None # type: Tuple[A, object]
+
+b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = id(c) # E: Incompatible types in assignment (expression has type "Tuple[A, object]", variable has type "A")
+
+a = id(a)
+b = id(b)
+c = id(c)
+
+def id(a: T) -> T: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testInferringGenericFunctionTypeForLvar]
+from typing import TypeVar
+T = TypeVar('T')
+def f() -> None:
+    a = id
+    b = None # type: int
+    c = None # type: str
+    b = a(c) # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    b = a(b)
+    c = a(c)
+def id(x: T) -> T:
+    return x
+[out]
+
+[case testUnderspecifiedInferenceResult]
+from typing import TypeVar
+T = TypeVar('T')
+class A: pass
+a = None # type: A
+
+def ff() -> None:
+    x = f() # E: Need type annotation for variable
+
+g(None) # Ok
+f()     # Ok because not used to infer local variable type
+g(a)
+
+def f() -> T: pass
+def g(a: T) -> None: pass
+[out]
+
+[case testUnsolvableInferenceResult]
+from typing import TypeVar
+T = TypeVar('T')
+f(A(), g()) # Fail
+f(A(), A())
+
+def f(a: T, b: T) -> None: pass
+def g() -> None: pass
+class A: pass
+[out]
+main:3: error: Cannot infer type argument 1 of "f"
+main:3: error: "g" does not return a value
+
+[case testInferenceWithMultipleConstraints]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+
+b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(a, b)
+a = f(b, a)
+
+def f(a: T, b: T) -> T: pass
+
+class A: pass
+class B(A): pass
+
+[case testInferenceWithMultipleVariables]
+from typing import Tuple, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+taa = None # type: Tuple[A, A]
+tab = None # type: Tuple[A, B]
+tba = None # type: Tuple[B, A]
+
+taa = f(a, b) # Fail
+taa = f(b, a) # Fail
+tba = f(a, b) # Fail
+
+tab = f(a, b)
+tba = f(b, a)
+
+def f(a: T, b: S) -> Tuple[T, S]: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:9: error: Argument 2 to "f" has incompatible type "B"; expected "A"
+main:10: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:11: error: Argument 1 to "f" has incompatible type "A"; expected "B"
+main:11: error: Argument 2 to "f" has incompatible type "B"; expected "A"
+
+[case testConstraintSolvingWithSimpleGenerics]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ao = None # type: A[object]
+ab = None # type: A[B]
+ac = None # type: A[C]
+
+ab = f(ao) # E: Argument 1 to "f" has incompatible type A[object]; expected A[B]
+ao = f(ab) # E: Argument 1 to "f" has incompatible type A[B]; expected A[object]
+ab = f(ac) # E: Argument 1 to "f" has incompatible type A[C]; expected A[B]
+ab = g(ao) # E: Argument 1 to "g" has incompatible type A[object]; expected A[B]
+ao = g(ab) # E: Argument 1 to "g" has incompatible type A[B]; expected A[object]
+
+ab = f(ab)
+ac = f(ac)
+ao = f(ao)
+
+ab = g(ab)
+ao = g(ao)
+
+def f(a: 'A[T]') -> 'A[T]': pass
+
+def g(a: T) -> T: pass
+
+class A(Generic[T]): pass
+class B: pass
+class C: pass
+
+[case testConstraintSolvingFailureWithSimpleGenerics]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+ao = None # type: A[object]
+ab = None # type: A[B]
+
+f(ao, ab) # E: Cannot infer type argument 1 of "f"
+f(ab, ao) # E: Cannot infer type argument 1 of "f"
+f(ao, ao)
+f(ab, ab)
+
+def f(a: 'A[T]', b: 'A[T]') -> None: pass
+
+class A(Generic[T]): pass
+class B: pass
+
+[case testTypeInferenceWithCalleeDefaultArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+o = None # type: object
+
+a = f(o)    # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = g(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+o = f()
+o = f(o)
+a = f(a)
+a = g(a)
+
+def f(a: T = None) -> T: pass
+def g(a: T, b: T = None) -> T: pass
+
+class A: pass
+
+
+-- Generic function inference with multiple inheritance
+-- ----------------------------------------------------
+
+
+[case testGenericFunctionInferenceWithMultipleInheritance]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+
+class A(I, J): pass
+class B(I, J): pass
+class C(I): pass
+class D(J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: I) -> None: pass
+
+a = f(A(), C())
+g(a)
+b = f(A(), B())
+g(b)
+c = f(A(), D())
+g(c) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
+d = f(D(), A())
+g(d) # E: Argument 1 to "g" has incompatible type "J"; expected "I"
+e = f(D(), C())
+g(e) # E: Argument 1 to "g" has incompatible type "object"; expected "I"
+
+[case testGenericFunctionInferenceWithMultipleInheritance2]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+
+class A(I): pass
+class B(A, J): pass
+class C(I, J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: I) -> None: pass
+def h(x: J) -> None: pass
+
+a = f(B(), C())
+g(a)
+h(a) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+b = f(C(), B())
+g(b)
+h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+c = f(A(), B())
+g(a)
+h(b) # E: Argument 1 to "h" has incompatible type "I"; expected "J"
+
+[case testGenericFunctionInferenceWithMultipleInheritance3]
+from typing import TypeVar
+
+class I: pass
+class J: pass
+class K(J): pass
+
+class A(K): pass
+class B(A, I): pass
+class C(I, J): pass
+
+T = TypeVar('T')
+def f(a: T, b: T) -> T: pass
+def g(x: K) -> None: pass
+
+a = f(B(), C())
+g(a) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
+b = f(A(), C())
+g(b) # E: Argument 1 to "g" has incompatible type "J"; expected "K"
+c = f(A(), B())
+g(c)
+
+[case testPrecedenceOfFirstBaseAsInferenceResult]
+from typing import TypeVar
+from abc import abstractmethod, ABCMeta
+T = TypeVar('T')
+a, i, j = None, None, None # type: (A, I, J)
+
+a = f(B(), C())
+
+class I(metaclass=ABCMeta): pass
+class J(metaclass=ABCMeta): pass
+
+def f(a: T, b: T) -> T: pass
+
+class A: pass
+class B(A, I, J): pass
+class C(A, I, J): pass
+
+
+-- Generic function inference with function arguments
+-- --------------------------------------------------
+
+
+[case testNonOverloadedMapInference]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+class A: pass
+b = bool()
+def f(x: bool) -> A: pass
+def mymap(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+
+l = mymap(f, [b])
+l = [A()]
+lb = [b]
+l = lb # E: Incompatible types in assignment (expression has type List[bool], variable has type List[A])
+[builtins fixtures/for.pyi]
+
+
+-- Generic function inference with unions
+-- --------------------------------------
+
+
+[case testUnionInference]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+U = TypeVar('U')
+def f(x: Union[T, int], y: T) -> T: pass
+f(1, 'a')() # E: "str" not callable
+f('a', 1)() # E: "object" not callable
+f('a', 'a')() # E: "str" not callable
+f(1, 1)() # E: "int" not callable
+
+def g(x: Union[T, List[T]]) -> List[T]: pass
+def h(x: List[str]) -> None: pass
+g('a')() # E: List[str] not callable
+
+# The next line is a case where there are multiple ways to satisfy a constraint
+# involving a Union. Either T = List[str] or T = str would turn out to be valid,
+# but mypy doesn't know how to branch on these two options (and potentially have
+# to backtrack later) and defaults to T = None. The result is an awkward error
+# message. Either a better error message, or simply accepting the call, would be
+# preferable here.
+g(['a']) # E: Argument 1 to "g" has incompatible type List[str]; expected List[None]
+
+h(g(['a']))
+
+def i(x: Union[List[T], List[U]], y: List[T], z: List[U]) -> None: pass
+a = [1]
+b = ['b']
+i(a, a, b)
+i(b, a, b)
+i(a, b, b) # E: Argument 1 to "i" has incompatible type List[int]; expected List[str]
+[builtins fixtures/list.pyi]
+
+
+[case testUnionInferenceWithTypeVarValues]
+from typing import TypeVar, Union
+AnyStr = TypeVar('AnyStr', bytes, str)
+def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass
+f('foo')
+f('foo', 'bar')
+f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+f(1)
+f(1, 'foo')
+f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/primitives.pyi]
+
+
+[case testUnionTwoPassInference-skip]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+U = TypeVar('U')
+def j(x: Union[List[T], List[U]], y: List[T]) -> List[U]: pass
+
+a = [1]
+b = ['b']
+# We could infer: Since List[str] <: List[T], we must have T = str.
+# Then since List[int] <: Union[List[str], List[U]], and List[int] is
+# not a subtype of List[str], we must have U = int.
+# This is not currently implemented.
+j(a, b)
+[builtins fixtures/list.pyi]
+
+
+[case testUnionContext]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+def f() -> List[T]: pass
+d1 = f() # type: Union[List[int], str]
+d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type List[None], variable has type "Union[int, str]")
+def g(x: T) -> List[T]: pass
+d3 = g(1) # type: Union[List[int], List[str]]
+[builtins fixtures/list.pyi]
+
+
+[case testGenericFunctionSubtypingWithUnions]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+S = TypeVar('S')
+def k1(x: int, y: List[T]) -> List[Union[T, int]]: pass
+def k2(x: S, y: List[T]) -> List[Union[T, int]]: pass
+a = k2
+a = k2
+a = k1 # E: Incompatible types in assignment (expression has type Callable[[int, List[T]], List[Union[T, int]]], variable has type Callable[[S, List[T]], List[Union[T, int]]])
+b = k1
+b = k1
+b = k2
+[builtins fixtures/list.pyi]
+
+
+-- Literal expressions
+-- -------------------
+
+
+[case testDictLiteral]
+from typing import Dict
+class A: pass
+class B: pass
+def d_ab() -> Dict[A, B]: return {}
+def d_aa() -> Dict[A, A]: return {}
+a, b = None, None # type: (A, B)
+d = {a:b}
+d = d_ab()
+d = d_aa() # E: Incompatible types in assignment (expression has type Dict[A, A], variable has type Dict[A, B])
+[builtins fixtures/dict.pyi]
+
+[case testSetLiteral]
+from typing import Any, Set
+a, x = None, None # type: (int, Any)
+def s_i() -> Set[int]: return set()
+def s_s() -> Set[str]: return set()
+s = {a}
+s = {x}
+s = s_i()
+s = s_s() # E: Incompatible types in assignment (expression has type Set[str], variable has type Set[int])
+[builtins fixtures/set.pyi]
+
+[case testSetWithStarExpr]
+# flags: --fast-parser
+s = {1, 2, *(3, 4)}
+t = {1, 2, *s}
+reveal_type(s)  # E: Revealed type is 'builtins.set[builtins.int*]'
+reveal_type(t)  # E: Revealed type is 'builtins.set[builtins.int*]'
+[builtins fixtures/set.pyi]
+
+
+-- For statements
+-- --------------
+
+
+[case testInferenceOfFor1]
+a, b = None, None # type: (A, B)
+
+for x in [A()]:
+    b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x
+
+for y in []: # E: Need type annotation for variable
+    a = y
+
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testInferenceOfFor2]
+
+a, b, c = None, None, None # type: (A, B, C)
+for x, (y, z) in [(A(), (B(), C()))]:
+    b = x # Fail
+    c = y # Fail
+    a = z # Fail
+    a = x
+    b = y
+    c = z
+for xx, yy, zz in [(A(), B())]: # Fail
+    pass
+for xx, (yy, zz) in [(A(), B())]: # Fail
+    pass
+for xxx, yyy in [(None, None)]: # Fail
+    pass
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/for.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A")
+main:10: error: Need more than 2 values to unpack (3 expected)
+main:12: error: '__main__.B' object is not iterable
+main:14: error: Need type annotation for variable
+
+[case testInferenceOfFor3]
+
+a, b = None, None # type: (A, B)
+
+for x, y in [[A()]]:
+    b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    b = y # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    a = x
+    a = y
+
+for e, f in [[]]: # E: Need type annotation for variable
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testForStatementInferenceWithVoid]
+import typing
+for x in f(): # E: "f" does not return a value
+    pass
+def f() -> None: pass
+[builtins fixtures/for.pyi]
+
+[case testReusingInferredForIndex]
+import typing
+for a in [A()]: pass
+a = A()
+a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+for a in []: pass
+a = A()
+a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+
+[case testReusingInferredForIndex2]
+import typing
+def f() -> None:
+    for a in [A()]: pass
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    for a in []: pass
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Regression tests
+-- ----------------
+
+
+[case testMultipleAssignmentWithPartialDefinition]
+
+a = None # type: A
+x, a = a, a
+x = a
+a = x
+x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+
+[case testMultipleAssignmentWithPartialDefinition2]
+
+a = None # type: A
+a, x = [a, a]
+x = a
+a = x
+x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+[builtins fixtures/for.pyi]
+
+[case testMultipleAssignmentWithPartialDefinition3]
+from typing import Any, cast
+a = None # type: A
+x, a = cast(Any, a)
+x = a
+a = x
+x = object()
+a = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+class A: pass
+
+[case testInferGlobalDefinedInBlock]
+import typing
+if A:
+    a = A()
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A: pass
+class B: pass
+
+
+-- Inferring attribute types
+-- -------------------------
+
+
+[case testInferAttributeType]
+import typing
+class A:
+    a = B()
+class B: pass
+
+A().a = B()
+A().a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInferAttributeTypeAndAssignInInit]
+import typing
+class A:
+    a = B()
+    def __init__(self) -> None:
+        self.a = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        self.a = B()
+class B: pass
+[out]
+
+[case testInferAttributeInInit]
+import typing
+class B: pass
+class A:
+    def __init__(self) -> None:
+        self.a = A()
+        self.b = B()
+a = A()
+a.a = A()
+a.b = B()
+a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a.b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testInferAttributeInInitUsingChainedAssignment]
+import typing
+class B: pass
+class A:
+    def __init__(self) -> None:
+        self.a = self.b = A()
+a = A()
+a.a = A()
+a.b = A()
+a.a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a.b = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+
+-- Lambdas
+-- -------
+
+
+[case testInferLambdaType]
+from typing import List, Callable
+li = [1]
+l = lambda: li
+f1 = l # type: Callable[[], List[int]]
+f2 = l # type: Callable[[], List[str]] # E: Incompatible types in assignment (expression has type Callable[[], List[int]], variable has type Callable[[], List[str]])
+[builtins fixtures/list.pyi]
+
+[case testInferLambdaType2]
+from typing import List, Callable
+l = lambda: [B()]
+f1 = l # type: Callable[[], List[B]]
+f2 = l # type: Callable[[], List[A]] # E: Incompatible types in assignment (expression has type Callable[[], List[B]], variable has type Callable[[], List[A]])
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testUninferableLambda]
+from typing import TypeVar, Callable
+X = TypeVar('X')
+def f(x: Callable[[X], X]) -> X: pass
+y = f(lambda x: x) # E: Cannot infer type argument 1 of "f"
+
+[case testUninferableLambdaWithTypeError]
+from typing import TypeVar, Callable
+X = TypeVar('X')
+def f(x: Callable[[X], X], y: str) -> X: pass
+y = f(lambda x: x, 1) # Fail
+[out]
+main:4: error: Cannot infer type argument 1 of "f"
+main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str"
+
+[case testInferLambdaNone]
+from typing import Callable
+def f(x: Callable[[], None]) -> None: pass
+def g(x: Callable[[], int]) -> None: pass
+a = lambda: None
+f(a)
+g(a) # E: Argument 1 to "g" has incompatible type Callable[[], None]; expected Callable[[], int]
+b = lambda: None  # type: Callable[[], None]
+f(b)
+g(b) # E: Argument 1 to "g" has incompatible type Callable[[], None]; expected Callable[[], int]
+
+
+-- Boolean operators
+-- -----------------
+
+
+[case testOrOperationWithGenericOperands]
+from typing import List
+a = None # type: List[A]
+o = None # type: List[object]
+a2 = a or []
+a = a2
+a2 = o # E: Incompatible types in assignment (expression has type List[object], variable has type List[A])
+class A: pass
+[builtins fixtures/list.pyi]
+
+
+-- Accessing variable before its type has been inferred
+-- ----------------------------------------------------
+
+
+[case testAccessGlobalVarBeforeItsTypeIsAvailable]
+import typing
+x.y  # E: Cannot determine type of 'x'
+x = object()
+x.y  # E: "object" has no attribute "y"
+
+[case testAccessDataAttributeBeforeItsTypeIsAvailable]
+
+a = None # type: A
+a.x.y  # E: Cannot determine type of 'x'
+class A:
+   def __init__(self) -> None:
+       self.x = object()
+a.x.y  # E: "object" has no attribute "y"
+
+
+-- Ducktype declarations
+-- ---------------------
+
+
+[case testListWithDucktypeCompatibility]
+from typing import List, _promote
+class A: pass
+@_promote(A)
+class B: pass
+a = None  # type: List[A]
+x1 = [A(), B()]
+x2 = [B(), A()]
+x3 = [B(), B()]
+a = x1
+a = x2
+a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+[case testListWithDucktypeCompatibilityAndTransitivity]
+from typing import List, _promote
+class A: pass
+@_promote(A)
+class B: pass
+@_promote(B)
+class C: pass
+a = None  # type: List[A]
+x1 = [A(), C()]
+x2 = [C(), A()]
+x3 = [B(), C()]
+a = x1
+a = x2
+a = x3 # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+[builtins fixtures/list.pyi]
+
+
+-- Inferring type of variable when initialized to an empty collection
+-- ------------------------------------------------------------------
+
+
+[case testInferListInitializedToEmpty]
+a = []
+a.append(1)
+a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyUsingUpdate]
+a = []
+a.extend([''])
+a.append(0)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotated]
+a = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndReadBeforeAppend]
+a = []  # E: Need type annotation for variable
+if a: pass
+a.xyz
+a.append('')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndIncompleteTypeInAppend]
+a = [] # E: Need type annotation for variable
+a.append([])
+a()
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndMultipleAssignment]
+a, b = [], []
+a.append(1)
+b.append('')
+a() # E: List[int] not callable
+b() # E: List[str] not callable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInFunction]
+def f() -> None:
+   a = []
+   a.append(1)
+   a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInFunction]
+def f() -> None:
+    a = []  # E: Need type annotation for variable
+
+def g() -> None: pass
+
+a = []
+a.append(1)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndReadBeforeAppendInFunction]
+def f() -> None:
+    a = []  # E: Need type annotation for variable
+    if a: pass
+    a.xyz
+    a.append('')
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInClassBody]
+class A:
+   a = []
+   a.append(1)
+   a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInClassBody]
+class A:
+    a = []  # E: Need type annotation for variable
+
+class B:
+    a = []
+    a.append(1)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInMethod]
+class A:
+    def f(self) -> None:
+        a = []
+        a.append(1)
+        a.append('')  # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyAndNotAnnotatedInMethod]
+class A:
+    def f(self) -> None:
+        a = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferListInitializedToEmptyInMethodViaAttribute]
+class A:
+    def f(self) -> None:
+        # Attributes aren't supported right now.
+        self.a = [] # E: Need type annotation for variable
+        self.a.append(1) # E: Cannot determine type of 'a'
+        self.a.append('') # E: Cannot determine type of 'a'
+[builtins fixtures/list.pyi]
+[out]
+
+[case testInferSetInitializedToEmpty]
+a = set()
+a.add(1)
+a.add('')  # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferSetInitializedToEmptyUsingDiscard]
+a = set()
+a.discard('')
+a.add(0)  # E: Argument 1 to "add" of "set" has incompatible type "int"; expected "str"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferSetInitializedToEmptyUsingUpdate]
+a = set()
+a.update({0})
+a.add('')  # E: Argument 1 to "add" of "set" has incompatible type "str"; expected "int"
+[builtins fixtures/set.pyi]
+[out]
+
+[case testInferDictInitializedToEmpty]
+a = {}
+a[1] = ''
+a() # E: Dict[int, str] not callable
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyUsingUpdate]
+a = {}
+a.update({'': 42})
+a() # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyUsingUpdateError]
+a = {}  # E: Need type annotation for variable
+a.update([1, 2])
+a()
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyAndIncompleteTypeInUpdate]
+a = {} # E: Need type annotation for variable
+a[1] = {}
+b = {} # E: Need type annotation for variable
+b[{}] = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testInferDictInitializedToEmptyAndUpdatedFromMethod]
+map = {}
+def add():
+    map[1] = 2
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testSpecialCaseEmptyListInitialization]
+def f(blocks: Any): # E: Name 'Any' is not defined
+    to_process = [] # E: Need type annotation for variable
+    to_process = list(blocks)
+[builtins fixtures/list.pyi]
+[out]
+
+[case testSpecialCaseEmptyListInitialization2]
+def f(blocks: object):
+    to_process = [] # E: Need type annotation for variable
+    to_process = list(blocks) # E: No overload variant of "list" matches argument types [builtins.object]
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Inferring types of variables first initialized to None (partial types)
+-- ----------------------------------------------------------------------
+
+
+[case testLocalVariablePartiallyInitializedToNone]
+def f() -> None:
+    if object():
+        x = None
+    else:
+        x = 1
+    x() # E: "int" not callable
+[out]
+
+[case testLocalVariablePartiallyTwiceInitializedToNone]
+def f() -> None:
+    if object():
+        x = None
+    elif object():
+        x = None
+    else:
+        x = 1
+    x() # E: "int" not callable
+[out]
+
+[case testLvarInitializedToNoneWithoutType]
+import typing
+def f() -> None:
+    a = None # E: Need type annotation for variable
+    a.x() # E: None has no attribute "x"
+[out]
+
+[case testGvarPartiallyInitializedToNone]
+x = None
+if object():
+    x = 1
+x() # E: "int" not callable
+
+[case testPartiallyInitializedToNoneAndThenToPartialList]
+x = None
+if object():
+    # Promote from partial None to partial list.
+    x = []
+    x.append(1)
+x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+
+[case testPartiallyInitializedToNoneAndThenReadPartialList]
+x = None
+if object():
+    # Promote from partial None to partial list.
+    x = []  # E: Need type annotation for variable
+    x
+[builtins fixtures/list.pyi]
+
+[case testPartiallyInitializedToNoneAndPartialListAndLeftPartial]
+def f() -> None:
+    x = None
+    if object():
+        # Promote from partial None to partial list.
+        x = []  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+[out]
+
+[case testPartiallyInitializedToNoneAndThenToIncompleteType]
+from typing import TypeVar,  Dict
+T = TypeVar('T')
+def f(*x: T) -> Dict[int, T]: pass
+x = None  # E: Need type annotation for variable
+if object():
+    x = f()
+[builtins fixtures/dict.pyi]
+
+[case testPartiallyInitializedVariableDoesNotEscapeScope1]
+def f() -> None:
+    x = None  # E: Need type annotation for variable
+x = 1
+[out]
+
+[case testPartiallyInitializedVariableDoesNotEscapeScope2]
+x = None  # E: Need type annotation for variable
+def f() -> None:
+    x = None
+    x = 1
+x()  # E: None not callable
+
+[case testAttributePartiallyInitializedToNone]
+class A:
+    def f(self) -> None:
+        self.x = None
+        self.x = 1
+        self.x() # E: "int" not callable
+[out]
+
+[case testAttributePartiallyInitializedToNoneWithMissingAnnotation]
+class A:
+    def f(self) -> None:
+        self.x = None
+
+    def g(self) -> None:
+        self.x = 1
+        self.x()
+[out]
+main:3: error: Need type annotation for variable
+main:7: error: "int" not callable
+
+[case testGlobalInitializedToNoneSetFromFunction]
+a = None
+def f():
+    global a
+    a = 42
+[out]
+
+[case testGlobalInitializedToNoneSetFromMethod]
+a = None
+class C:
+    def m(self):
+        global a
+        a = 42
+[out]
+
+-- More partial type errors
+-- ------------------------
+
+[case testPartialTypeErrorSpecialCase1]
+# This used to crash.
+class A:
+    x = None
+    def f(self) -> None:
+        for a in self.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:3: error: Need type annotation for variable
+main:5: error: None has no attribute "__iter__"
+
+[case testPartialTypeErrorSpecialCase2]
+# This used to crash.
+class A:
+    x = []
+    def f(self) -> None:
+        for a in self.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:3: error: Need type annotation for variable
+
+[case testPartialTypeErrorSpecialCase3]
+class A:
+    x = None
+    def f(self) -> None:
+        for a in A.x:
+            pass
+[builtins fixtures/for.pyi]
+[out]
+main:2: error: Need type annotation for variable
+main:4: error: None has no attribute "__iter__"
+
+
+-- Multipass
+-- ---------
+
+
+[case testMultipassAndAccessVariableBeforeDefinition]
+def f() -> None:
+    y = x
+    y() # E: "int" not callable
+x = 1
+[out]
+
+[case testMultipassAndAccessInstanceVariableBeforeDefinition]
+class A:
+    def f(self) -> None:
+        y = self.x
+        y() # E: "int" not callable
+
+    def g(self) -> None:
+        self.x = 1
+[out]
+
+[case testMultipassAndTopLevelVariable]
+y = x # E: Cannot determine type of 'x'
+y()
+x = 1+0
+[out]
+
+[case testMultipassAndDecoratedMethod]
+from typing import Callable, TypeVar
+
+T = TypeVar('T')
+
+class A:
+    def f(self) -> None:
+        self.g() # E: Too few arguments for "g" of "A"
+        self.g(1)
+    @dec
+    def g(self, x: str) -> None: pass
+
+def dec(f: Callable[[A, str], T]) -> Callable[[A, int], T]: pass
+[out]
+
+[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute]
+class A:
+    def f(self) -> None:
+        self.y = self.x
+
+    def g(self) -> None:
+        self.x = 1
+
+    def h(self) -> None:
+        self.y() # E: "int" not callable
+[out]
+
+[case testMultipassAndDefineAttributeBasedOnNotReadyAttribute2]
+class A:
+    def f(self) -> None:
+        self.y = self.x
+        self.z = self.y
+        self.z() # E
+        self.y() # E
+
+    def g(self) -> None:
+        self.x = 1
+
+    def h(self) -> None:
+        self.y() # E
+[out]
+main:5: error: "int" not callable
+main:6: error: "int" not callable
+main:12: error: "int" not callable
+
+[case testMultipassAndPartialTypes]
+def f() -> None:
+    x = []
+    y
+    x.append(1)
+    x.append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+    x.append(y) # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int"
+y = ''
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypes2]
+s = ''
+n = 0
+def f() -> None:
+    global s, n
+    x = []
+    x.append(y)
+    s = x[0]
+    n = x[0] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    x.append(1) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str"
+y = ''
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypes3]
+from typing import Dict
+def g(d: Dict[str, int]) -> None: pass
+def f() -> None:
+    x = {}
+    x[1] = y
+    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, str]; expected Dict[str, int]
+    x[1] = 1 # E: Incompatible types in assignment (expression has type "int", target has type "str")
+    x[1] = ''
+y = ''
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypes4]
+from typing import Dict
+def g(d: Dict[str, int]) -> None: pass
+def f() -> None:
+    x = {}
+    y
+    x[1] = 1
+    g(x) # E: Argument 1 to "g" has incompatible type Dict[int, int]; expected Dict[str, int]
+y = ''
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndCircularDependency]
+class A:
+    def f(self) -> None:
+        self.x = self.y # E: Cannot determine type of 'y'
+
+    def g(self) -> None:
+        self.y = self.x
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase1]
+def f() -> None:
+    y = o
+    x = []
+    x.append(y)
+    x() # E: List[int] not callable
+o = 1
+[builtins fixtures/list.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase2]
+def f() -> None:
+    y = o
+    x = {}
+    x[''] = y
+    x() # E: Dict[str, int] not callable
+o = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase3]
+def f() -> None:
+    x = {} # E: Need type annotation for variable
+    y = o
+    z = {} # E: Need type annotation for variable
+o = 1
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase4]
+def f() -> None:
+    y = o
+    x = None
+    x = y
+    x() # E: "int" not callable
+o = 1
+[out]
+
+[case testMultipassAndPartialTypesSpecialCase5]
+def f() -> None:
+    x = None
+    y = o
+    x = y
+    x() # E: "int" not callable
+o = 1
+[out]
+
+[case testMultipassAndClassAttribute]
+class S:
+    def foo(self) -> int:
+        return R.X
+
+class R:
+    X = 2
+
+[case testMultipassAndMultipleFiles]
+import m
+def f() -> None:
+    x()
+x = 0
+[file m.py]
+def g() -> None:
+    y()
+y = 0
+[out]
+tmp/m.py:2: error: "int" not callable
+main:3: error: "int" not callable
+
+
+-- Tests for special cases of unification
+-- --------------------------------------
+
+[case testUnificationRedundantUnion]
+from typing import Union
+a = None  # type: Union[int, str]
+b = None  # type: Union[str, tuple]
+def f(): pass
+def g(x: Union[int, str]): pass
+c = a if f() else b
+g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, tuple]"; expected "Union[int, str]"
+
+[case testUnificationMultipleInheritance]
+class A: pass
+class B:
+    def foo(self): pass
+class C(A, B): pass
+def f(): pass
+a1 = B() if f() else C()
+a1.foo()
+a2 = C() if f() else B()
+a2.foo()
+
+[case testUnificationMultipleInheritanceAmbiguous]
+# Show that join_instances_via_supertype() breaks ties using the first base class.
+class A1: pass
+class B1:
+    def foo1(self): pass
+class C1(A1, B1): pass
+
+class A2: pass
+class B2:
+    def foo2(self): pass
+class C2(A2, B2): pass
+
+class D1(C1, C2): pass
+class D2(C2, C1): pass
+
+def f(): pass
+
+a1 = D1() if f() else D2()
+a1.foo1()
+a2 = D2() if f() else D1()
+a2.foo2()
+
+[case testUnificationEmptyListLeft]
+def f(): pass
+a = [] if f() else [0]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListRight]
+def f(): pass
+a = [0] if f() else []
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListLeftInContext]
+from typing import List
+def f(): pass
+a = [] if f() else [0] # type: List[int]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptyListRightInContext]
+# TODO Find an example that really needs the context
+from typing import List
+def f(): pass
+a = [0] if f() else [] # type: List[int]
+a() # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testUnificationEmptySetLeft]
+def f(): pass
+a = set() if f() else {0}
+a() # E: Set[int] not callable
+[builtins fixtures/set.pyi]
+
+[case testUnificationEmptyDictLeft]
+def f(): pass
+a = {} if f() else {0: 0}
+a() # E: Dict[int, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationEmptyDictRight]
+def f(): pass
+a = {0: 0} if f() else {}
+a() # E: Dict[int, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationDictWithEmptyListLeft]
+def f(): pass
+a = {0: []} if f() else {0: [0]}
+a() # E: Dict[int, List[int]] not callable
+[builtins fixtures/dict.pyi]
+
+[case testUnificationDictWithEmptyListRight]
+def f(): pass
+a = {0: [0]} if f() else {0: []}
+a() # E: Dict[int, List[int]] not callable
+[builtins fixtures/dict.pyi]
+
+[case testMisguidedSetItem]
+from typing import Generic, Sequence, TypeVar
+T = TypeVar('T')
+class C(Sequence[T], Generic[T]): pass
+C[0] = 0
+[out]
+main:4: error: Type expected within [...]
+main:4: error: Unsupported target for indexed assignment
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
new file mode 100644
index 0000000..d6041cc
--- /dev/null
+++ b/test-data/unit/check-isinstance.test
@@ -0,0 +1,1330 @@
+[case testForcedAssignment]
+x = 1 # type: object
+y = 1
+y = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+x = 2
+y = x
+[case testJoinAny]
+from typing import List, Any
+
+x = None # type: List[Any]
+
+def foo() -> List[int]: pass
+def bar() -> List[str]: pass
+
+if bool():
+    x = foo()
+else:
+    x = bar()
+
+x * 2
+[builtins fixtures/list.pyi]
+
+[case testGeneratorExpressionTypes]
+
+class A: y = 1
+x = [A()]
+y = [x]
+
+z = [1,2]
+z = [a.y for b in y for a in b]
+
+[builtins fixtures/list.pyi]
+
+[case testClassAttributeInitialization-skip]
+class A:
+    x = None # type: int
+    def __init__(self) -> None:
+        self.y = None # type: int
+        z = self.x
+        w = self.y
+[case testAssignmentSubtypes-skip]
+from typing import Union
+
+def foo(x: Union[str, int]):
+    if isinstance(x, int):
+        x = 'a'
+    x + 'a'         # Works in the current code
+    z = x           # We probably want this to be of type str.
+    y = [x]         # But what type should this be?
+    y[0] + 'a'      #  (1) Should this work?
+    y + [1]         #  (2) Or this?
+    z = 1           # Also, is this valid?
+
+x = None # type: int
+y = [x]
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testFunctionDefaultArgs]
+
+class A: pass
+class B(A): y = 1
+
+x =  A()
+
+def foo(x: A = B()):
+    x.y   # E: "A" has no attribute "y"
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceFancyConditionals]
+
+class A: pass
+class B(A): y = 1
+
+x =  A()
+
+if isinstance(x, B):
+    x.y
+
+while isinstance(x, B):
+    x.y
+
+while isinstance(x, B):
+    x.y
+    x = B()
+
+[builtins fixtures/isinstance.pyi]
+
+
+[case testSubtypingWithAny]
+
+class A: y = 1
+class B(A): z = 1
+
+def foo(): pass
+
+x = A()
+x = B()
+x.z
+x = foo()
+x.z          # E: "A" has no attribute "z"
+x.y
+
+[case testSingleMultiAssignment-skip]
+
+x = 'a'
+(x, ) = ('a',)
+
+[case testUnionMultiAssignment]
+from typing import Union
+
+x = None # type: Union[int, str]
+x = 1
+x = 'a'
+x + 1    # E: Unsupported operand types for + ("str" and "int")
+x = 1
+(x, y) = ('a', 1)
+x + 1    # E: Unsupported operand types for + ("str" and "int")
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionIfZigzag]
+from typing import Union
+
+def f(x: Union[int, str]) -> None:
+    x = 1
+    if x:
+        x = 'a'
+        x = 1
+    x + 1
+[builtins fixtures/isinstancelist.pyi]
+
+
+[case testTwoLoopsUnion]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+def bar() -> None:
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x = foo()
+            if bool():
+                return
+            x = 'a'
+    x + 'a'
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testComplicatedBlocks]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+def bar() -> None:
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x = foo()
+            if bool():
+                return
+            x = 'a'
+    x + 'a'
+
+    x = foo()
+    if isinstance(x, int):
+        return
+    while bool():
+        x + 'a'
+        while bool():
+            x + 'a'         # E: Unsupported operand types for + (likely involving Union)
+            x = foo()
+            if bool():
+                continue
+            x = 'a'
+        x = 'a'
+    x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testUnionTryExcept]
+
+class A: y = A()
+class B(A): z = 1
+
+x = A()
+x = B()
+x.z
+try:
+    x.z
+    x = A()
+    x = B()
+    x.z
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+
+
+[case testUnionTryExcept2]
+
+class A: y = A()
+class B(A): z = 1
+
+x = A()
+try:
+    x.z # E: "A" has no attribute "z"
+    x = A()
+    x = B()
+    x.z
+except:
+    x.z # E: "A" has no attribute "z"
+    x = B()
+    x.z
+else:
+    x = B()
+x.z
+
+[case testUnionTryExcept3]
+class A: y = A()
+class B(A): z = 1
+x = A()
+x = B()
+try:
+    raise BaseException()
+    x = A()
+except:
+    pass
+x.z
+x = B()
+try:
+    x = A()
+    raise BaseException()
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+x = B()
+try:
+    pass
+except:
+    x = A()
+    raise BaseException()
+x.z
+try:
+    x = A()
+except:
+    pass
+x.z           # E: "A" has no attribute "z"
+x = B()
+try:
+    pass
+except:
+    x = A()
+x.z           # E: "A" has no attribute "z"
+[builtins fixtures/exception.pyi]
+[case testUnionTryExcept4]
+
+class A: pass
+class B(A): z = 1
+
+x = A()
+while bool():
+    try:
+        x.z # E: "A" has no attribute "z"
+        x = A()
+    except:
+        x = B()
+    else:
+        x = B()
+    x.z
+[builtins fixtures/exception.pyi]
+[case testUnionTryFinally]
+class A: pass
+class B(A): b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x.b # E: "A" has no attribute "b"
+    x = B()
+finally:
+    x.b  # E: "A" has no attribute "b"
+x.b
+[case testUnionTryFinally2]
+class A: pass
+class B(A): b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x = B()
+except:
+    pass
+finally:
+    pass
+x.b      # E: "A" has no attribute "b"
+[case testUnionTryFinally3]
+class A: pass
+class B(A): b = 1
+
+x = A()
+x = B()
+try:
+    x = A()
+    x = B()
+except:
+    pass
+finally:
+    x = B()
+x.b
+[case testUnionTryFinally4]
+class A: pass
+class B(A): b = 1
+
+while 2:
+    x = A()
+    x = B()
+    try:
+        x = A()
+        x = B()
+    except:
+        pass
+    finally:
+        x.b     # E: "A" has no attribute "b"
+        if not isinstance(x, B):
+            break
+    x.b
+[builtins fixtures/isinstancelist.pyi]
+[case testUnionTryFinally5]
+class A: pass
+class B(A): b = 1
+
+while 2:
+    x = A()
+    try:
+        x = A()
+        x = B()
+    finally:
+        x.b    # E: "A" has no attribute "b"
+        break
+        x.b
+    x.b
+[case testUnionTryFinally6]
+class A: pass
+class B(A): b = 1
+
+def f() -> int:
+    x = B()  # type: A
+    try:
+        x = B()
+    except:
+        x = A()
+        # An exception could occur here
+        x = B()
+    finally:
+        return x.b # E: "A" has no attribute "b"
+[out]
+[case testUnionListIsinstance]
+
+from typing import Union, List
+
+class A: y = A()
+class B(A): z = C()
+
+class C: pass
+class D(C): d = 1
+
+
+def f(x: Union[List[int], List[str], int]) -> None:
+    if isinstance(x, list):
+        a = x[0]
+        if isinstance(a, int):
+            a + 1
+            a + 'x' # E: Unsupported operand types for + ("int" and "str")
+
+        # type of a?
+        x + 1 # E: Unsupported operand types for + (likely involving Union)
+    else:
+        x[0] # E: Value of type "int" is not indexable
+        x + 1
+    x[0] # E: Value of type "Union[List[int], List[str], int]" is not indexable
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testUnionListIsinstance2]
+
+from typing import Union, List
+class A: a = 1
+class B: pass
+class C: pass
+
+def g(x: Union[A, B]) -> A: pass
+def h(x: C) -> A: pass
+
+def f(x: Union[A, B, C]) -> None:
+    if isinstance(x, C):
+        x = h(x)
+    else:
+        x = g(x)
+    x.a
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionStrictDefnBasic]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+x = x + 1
+x = foo()
+x = x + 1                # E: Unsupported operand types for + (likely involving Union)
+if isinstance(x, str):
+   x = x + 1             # E: Unsupported operand types for + ("str" and "int")
+   x = 1
+   x = x + 1
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testSubtypeRedefinitionBasic]
+from typing import Union
+
+class A: pass
+class B(A): y = 1
+
+x = A()
+x.y        # E: "A" has no attribute "y"
+x = B()
+x.y        # OK: x is known to be a B
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceBasic]
+from typing import Union
+
+x = None # type: Union[int, str]
+
+if isinstance(x, str):
+    x = x + 1   # E: Unsupported operand types for + ("str" and "int")
+    x = x + 'a'
+else:
+    x = x + 'a' # E: Unsupported operand types for + ("int" and "str")
+    x = x + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceIndexing]
+from typing import Union
+
+x = None # type: Union[int, str]
+j = [x]
+
+if isinstance(j[0], str):
+    j[0] = j[0] + 'a'
+    j[0] = j[0] + 1   # E: Unsupported operand types for + ("str" and "int")
+else:
+    j[0] = j[0] + 'a' # E: Unsupported operand types for + ("int" and "str")
+    j[0] = j[0] + 1
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceSubClassMember]
+from typing import Union
+
+class Animal:
+    pass
+
+class Dog(Animal):
+    paws = 4  # type: Union[int, str]
+
+    def bark(self): pass
+
+class House:
+    pet = None  # type: Animal
+
+h = House()
+h.pet = Dog()
+
+while bool():
+    if isinstance(h.pet, Dog):
+        if isinstance(h.pet.paws, str):
+            x = h.pet.paws + 'a'
+        y = h.pet.paws + 1   # E: Unsupported operand types for + (likely involving Union)
+        z = h.pet.paws + 'a' # E: Unsupported operand types for + (likely involving Union)
+        if isinstance(h.pet.paws, str):
+            x = h.pet.paws + 'a'
+            break
+        y = h.pet.paws + 1
+        z = h.pet.paws + 'a' # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstancelist.pyi]
+[case testIsInstanceSubClassReset]
+class A: pass
+class B(A): b=1
+
+class C:
+    a = A()
+
+x = C()
+x.a.b                   # E: "A" has no attribute "b"
+if isinstance(x.a, B):
+    x.a.b
+    x = C()
+    x.a.b               # E: "A" has no attribute "b"
+[builtins fixtures/isinstance.pyi]
+[case testIsinstanceTuple]
+from typing import Union
+
+class A:
+    pass
+
+class B:
+    def method2(self, arg: int):
+        return 123
+
+class C:
+    def method2(self, arg: int):
+        return 456
+
+    def method3(self, arg: str):
+        return 'abc'
+
+v = A() # type: Union[A, B, C]
+
+if isinstance(v, (B, C)):
+    v.method2(123)
+    v.method3('xyz') # E: Some element of union has no attribute "method3"
+[builtins fixtures/isinstance.pyi]
+
+[case testMemberAssignmentChanges-skip]
+from typing import Union
+
+class Dog:
+    paws = 1  # type: Union[int, str]
+
+pet = Dog()
+
+pet.paws + 'a'  # E: moo
+pet.paws = 'a'
+pet.paws + 'a'
+pet.paws = 1
+pet.paws + 1
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceSubClassMemberHard-skip]
+from typing import Union
+
+class Animal:
+    pass
+
+class Dog(Animal):
+    paws = 4  # type: Union[int, str]
+
+    def bark(self): pass
+
+class House:
+    pet = None  # type: Animal
+
+h = House()
+h.pet = Dog()
+
+if isinstance(h.pet, Dog):
+    if isinstance(h.pet.paws, str):
+        for i in [1]:
+            h.pet.paws + 'a'
+            if bool():
+                break
+            h.pet.paws = 1
+            h.pet.paws + 1
+
+    if isinstance(h.pet.paws, str):
+        h.pet.paws + 'a'
+    else:
+        h.pet.paws + 1
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceReturn]
+from typing import Union
+
+def foo() -> None:
+    x = 1 # type: Union[int, str]
+    if isinstance(x, int):
+        return
+    y = x + 'asdad'
+
+def bar() -> None:
+    x = 1 # type: Union[int, str]
+    if isinstance(x, int):
+        return
+    else:
+        pass
+    y = x + 'asdad'
+
+foo()
+[builtins fixtures/isinstancelist.pyi]
+[case testIsInstanceBadBreak]
+from typing import Union
+
+def foo() -> None:
+    x = None # type: Union[int, str]
+    if isinstance(x, int):
+        for z in [1,2]:
+            break
+    else:
+        pass
+    y = x + 'asdad'    # E: Unsupported operand types for + (likely involving Union)
+
+foo()
+[builtins fixtures/isinstancelist.pyi]
+[out]
+[case testIsInstanceThreeUnion]
+from typing import Union, List
+
+x = None # type: Union[int, str, List[int]]
+
+while bool():
+    if isinstance(x, int):
+        x + 1
+    elif isinstance(x, str):
+        x + 'a'
+    else:
+        x + [1]
+    x + 'a'           # E: Unsupported operand types for + (likely involving Union)
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+[case testIsInstanceThreeUnion2]
+from typing import Union, List
+
+x = None # type: Union[int, str, List[int]]
+
+while bool():
+    if isinstance(x, int):
+        x + 1
+        break
+    elif isinstance(x, str):
+        x + 'a'
+        break
+    x + [1]
+    x + 'a'           # E: Unsupported operand types for + ("list" and "str")
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsInstanceThreeUnion3]
+from typing import Union, List
+
+while bool():
+    x = None # type: Union[int, str, List[int]]
+    x = 1
+    if isinstance(x, int):
+        x + 1
+        break
+    elif isinstance(x, str):
+        x + 'a'
+        break
+    x + [1]           # These lines aren't reached because x was an int
+    x + 'a'
+x + [1]               # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testRemovingTypeRepeatedly]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+for i in [1, 2]:
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+    x = foo()
+    x + 'a'                 # E: Unsupported operand types for + (likely involving Union)
+    if isinstance(x, int):
+        break
+    x + 'a'
+
+x + 'a'                    # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+
+
+
+[case testModifyRepeatedly]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1     # E: Unsupported operand types for + (likely involving Union)
+x + 'a'   # E: Unsupported operand types for + (likely involving Union)
+
+x = 1
+x + 1
+x + 'a'   # E: Unsupported operand types for + ("int" and "str")
+
+x = 'a'
+x + 1     # E: Unsupported operand types for + ("str" and "int")
+x + 'a'
+
+x = foo()
+x + 1     # E: Unsupported operand types for + (likely involving Union)
+x + 'a'   # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoop]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 'a'
+x + 1      # E: Unsupported operand types for + ("str" and "int")
+x = 1
+x + 1
+
+while bool():
+      x + 1    # E: Unsupported operand types for + (likely involving Union)
+      x = 'a'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoop2]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 'a'
+x + 1      # E: Unsupported operand types for + ("str" and "int")
+x = 1
+x + 1
+
+for i in [1]:
+      x = 'a'
+
+x + 1    # E: Unsupported operand types for + (likely involving Union)
+
+[builtins fixtures/isinstancelist.pyi]
+
+
+[case testModifyLoop3]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+while bool():
+    x + 1
+    x = 'a'
+    break
+else:
+    x + 1
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+x = 1
+for y in [1]:
+    x + 1
+    x = 'a'
+    break
+else:
+    x + 1
+x + 1      # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoopWhile4]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+while bool():
+    x + 1
+    if bool():
+        x = 'a'
+        break
+else:
+    x + 1
+    x = 'a'
+x + 'a'
+x = 1
+while bool():
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    if bool():
+        x = 'a'
+        continue
+else:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    x = 'a'
+x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+[case testModifyLoopFor4]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+for y in [1]:
+    x + 1
+    if bool():
+        x = 'a'
+        break
+else:
+    x + 1
+    x = 'a'
+x + 'a'
+x = 1
+for y in [1]:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    if bool():
+        x = 'a'
+        continue
+else:
+    x + 1 # E: Unsupported operand types for + (likely involving Union)
+    x = 'a'
+x + 'a'
+[builtins fixtures/isinstancelist.pyi]
+[case testModifyNestedLoop]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+x = 1
+
+for y in [1]:
+    for z in [1]:
+        break
+    else:
+        x = 'a'
+        break
+else:
+    x + 1
+x + 1 # E: Unsupported operand types for + (likely involving Union)
+x = 1
+while bool():
+    while bool():
+        break
+    else:
+        x = 'a'
+        break
+else:
+    x + 1
+x + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testModifyLoopLong]
+from typing import Union
+
+class A: a = 1
+
+def foo() -> Union[int, str, A]: pass
+
+def bar() -> None:
+    x = foo()
+    x + 1  # E: Unsupported left operand type for + (some union)
+    if isinstance(x, A):
+       x.a
+    else:
+       if isinstance(x, int):
+          x + 1
+          x + 'a'  # E: Unsupported operand types for + ("int" and "str")
+       else:
+          x + 'a'
+          x.a      # E: "str" has no attribute "a"
+       x = A()
+
+    if isinstance(x, str):
+       x + 'a'
+    else:
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a
+          break
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a
+          continue
+
+       #for i in [1]:
+       while bool():
+          if isinstance(x, int):
+             x + 1
+          else:
+             x.a     # E: Some element of union has no attribute "a"
+          x = 'a'
+
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testWhileExitCondition1]
+from typing import Union
+x = 1  # type: Union[int, str]
+while isinstance(x, int):
+    if bool():
+        continue
+    x = 'a'
+else:
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/isinstance.pyi]
+
+[case testWhileExitCondition2]
+from typing import Union
+x = 1  # type: Union[int, str]
+while isinstance(x, int):
+    if bool():
+        break
+    x = 'a'
+else:
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+[builtins fixtures/isinstance.pyi]
+
+[case testWhileLinkedList]
+from typing import Union
+LinkedList = Union['Cons', 'Nil']
+class Nil: pass
+class Cons:
+    tail = None  # type: LinkedList
+def last(x: LinkedList) -> Nil:
+    while isinstance(x, Cons):
+        x = x.tail
+    return x
+[builtins fixtures/isinstance.pyi]
+
+[case testReturnAndFlow]
+def foo() -> int:
+    return 1 and 2
+    return 'a'
+[case testCastIsinstance]
+from typing import Union
+
+def foo() -> Union[int, str]: pass
+
+x = foo()
+y = 1 # type: int
+
+if isinstance(x, str):
+    x = y
+x + 1
+x + 'a'   # E: Unsupported operand types for + ("int" and "str")
+
+[builtins fixtures/isinstancelist.pyi]
+
+
+[case testUnreachableCode]
+x = 1 # type: int
+
+while bool():
+    x = 'a'           # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    break
+    x = 'a'           # Note: no error because unreachable code
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnreachableCode2]
+x = 1
+while bool():
+    try:
+        pass
+    except:
+        continue
+    else:
+        continue
+    x + 'a'
+[builtins fixtures/isinstance.pyi]
+
+[case testUnreachableWhileTrue]
+def f(x: int) -> None:
+    while True:
+        if x:
+            return
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableAssertFalse]
+def f() -> None:
+    assert False
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableAssertFalse2]
+# flags: --fast-parser
+def f() -> None:
+    # The old parser doesn't understand the syntax below
+    assert False, "hi"
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableReturnOrAssertFalse]
+def f(x: int) -> int:
+    if x:
+        return x
+    else:
+        assert False
+    1()
+[builtins fixtures/bool.pyi]
+
+[case testUnreachableTryExcept]
+def f() -> None:
+    try:
+        f()
+        return
+    except BaseException:
+        return
+    1()
+[builtins fixtures/exception.pyi]
+
+[case testUnreachableTryExceptElse]
+def f() -> None:
+    try:
+        f()
+    except BaseException:
+        return
+    else:
+        return
+    1()
+[builtins fixtures/exception.pyi]
+
+[case testUnreachableTryReturnFinally1]
+def f() -> None:
+    try:
+        return
+    finally:
+        pass
+    1()
+
+[case testUnreachableTryReturnFinally2]
+def f() -> None:
+    try:
+        pass
+    finally:
+        return
+    1()
+
+[case testUnreachableTryReturnExceptRaise]
+def f() -> None:
+    try:
+        return
+    except:
+        raise
+    1()
+
+[case testUnreachableReturnLambda]
+from typing import Callable
+def g(t: Callable[[int], int]) -> int: pass
+def f() -> int:
+    return g(lambda x: x)
+    1()
+
+[case testIsinstanceAnd]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+if isinstance(x, B) and 1:
+   x.flag
+[builtins fixtures/isinstancelist.pyi]
+[case testIsinstanceShortcircuit]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+if isinstance(x, B) and x.flag:
+    pass
+if isinstance(x, B) or x.flag: # E: "A" has no attribute "flag"
+    pass
+if not isinstance(x, B) or x.flag:
+    pass
+if not isinstance(x, B) and x.flag: # E: "A" has no attribute "flag"
+    pass
+[builtins fixtures/isinstancelist.pyi]
+[case testIsinstanceExpression]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+x = B() # type: A
+
+x.flag if isinstance(x, B) else 0
+0 if not isinstance(x, B) else x.flag
+0 if isinstance(x, B) else x.flag # E: "A" has no attribute "flag"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceMultiAnd]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+class C(A):
+    glaf = 1
+
+x = B() # type: A
+y = C() # type: A
+
+if isinstance(x, B) and isinstance(y, C):
+    x.flag += 1
+    y.glaf += 1
+    x() # E: "B" not callable
+    y() # E: "C" not callable
+else:
+    x() # E: "A" not callable
+    y() # E: "A" not callable
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceMultiAndSpecialCase]
+class A:
+    pass
+
+class B(A):
+    flag = 1
+
+class C(A):
+    glaf = 1
+
+x = B() # type: A
+y = C() # type: A
+
+if isinstance(x, B) and isinstance(y, int):
+    1() # type checking skipped
+if isinstance(y, int) and isinstance(x, B):
+    1() # type checking skipped
+[builtins fixtures/isinstancelist.pyi]
+
+[case testReturnWithCallExprAndIsinstance]
+
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if not isinstance(x, int):
+        return foo()
+    x()  # E: "int" not callable
+def foo(): pass
+[builtins fixtures/isinstancelist.pyi]
+[out]
+
+[case testIsinstanceOr1]
+from typing import Optional
+def f(a: bool, x: object) -> Optional[int]:
+    if a or not isinstance(x, int):
+        return None
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceOr2]
+from typing import Optional
+def g(a: bool, x: object) -> Optional[int]:
+    if not isinstance(x, int) or a:
+        return None
+    reveal_type(x) # E: Revealed type is 'builtins.int'
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceOr3]
+from typing import Optional
+def h(a: bool, x: object) -> Optional[int]:
+    if a or isinstance(x, int):
+        return None
+    return x # E: Incompatible return value type (got "object", expected "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceWithOverlappingUnionType]
+from typing import Union
+def f(x: Union[float, int]) -> None:
+    if isinstance(x, float):
+        pass
+    if not isinstance(x, int):
+        f(x)
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceWithOverlappingUnionType2]
+from typing import Union
+class A: pass
+class B(A): pass
+def f(x: Union[A, B]) -> None:
+    if isinstance(x, A):
+        pass
+    if not isinstance(x, B):
+        f(x)
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOfSuperclass]
+class A: pass
+class B(A): pass
+x = B()
+if isinstance(x, A):
+    reveal_type(x)  # E: Revealed type is '__main__.B'
+if not isinstance(x, A):
+    reveal_type(x)  # unreachable
+    x = A()
+reveal_type(x)  # E: Revealed type is '__main__.B'
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceOfNonoverlapping]
+class A: pass
+class B: pass
+x = B()
+if isinstance(x, A):
+    reveal_type(x)  # unreachable
+else:
+    reveal_type(x)  # E: Revealed type is '__main__.B'
+[builtins fixtures/isinstance.pyi]
+
+[case testAssertIsinstance]
+def f(x: object):
+    assert isinstance(x, int)
+    y = 0 # type: int
+    y = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionAssertIsinstance]
+from typing import Union
+def f(x: Union[str, int]):
+    assert isinstance(x, int)
+    y = 0 # type: int
+    y = x
+[builtins fixtures/isinstance.pyi]
+
+[case testAnyAssertIsinstance]
+from typing import Any
+def f(x: Any):
+    assert isinstance(x, int)  # this should narrow x to type int
+    x + "foo"  # E: Unsupported operand types for + ("int" and "str")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceOfGenericClassRetainsParameters]
+from typing import List, Union
+def f(x: Union[List[int], str]) -> None:
+    if isinstance(x, list):
+        x[0]()
+[builtins fixtures/isinstancelist.pyi]
+[out]
+main:4: error: "int" not callable
+
+[case testIsinstanceOrIsinstance]
+class A: pass
+class B(A):
+    flag = 1
+class C(A):
+    flag = 2
+x1 = A()
+if isinstance(x1, B) or isinstance(x1, C):
+    reveal_type(x1) # E: Revealed type is 'Union[__main__.B, __main__.C]'
+    f = x1.flag  # type: int
+else:
+    reveal_type(x1) # E: Revealed type is '__main__.A'
+    f = 0
+x2 = A()
+if isinstance(x2, A) or isinstance(x2, C):
+    reveal_type(x2) # E: Revealed type is '__main__.A'
+    f = x2.flag # E: "A" has no attribute "flag"
+else:
+    # unreachable
+    1()
+[builtins fixtures/isinstance.pyi]
+[out]
+[case testComprehensionIsInstance]
+from typing import List, Union
+a = [] # type: List[Union[int, str]]
+l = [x for x in a if isinstance(x, int)]
+g = (x for x in a if isinstance(x, int))
+d = {0: x for x in a if isinstance(x, int)}
+reveal_type(l) # E: Revealed type is 'builtins.list[builtins.int*]'
+reveal_type(g) # E: Revealed type is 'typing.Iterator[builtins.int*]'
+reveal_type(d) # E: Revealed type is 'builtins.dict[builtins.int*, builtins.int*]'
+[builtins fixtures/isinstancelist.pyi]
+
+[case testIsinstanceInWrongOrderInBooleanOp]
+class A:
+    m = 1
+def f(x: object) -> None:
+    if x.m and isinstance(x, A) or False:  # E: "object" has no attribute "m"
+        pass
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndOr]
+class A:
+    a = None  # type: A
+
+def f(x: object) -> None:
+    b = isinstance(x, A) and x.a or A()
+    reveal_type(b)  # E: Revealed type is '__main__.A'
+[builtins fixtures/isinstance.pyi]
+[out]
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
new file mode 100644
index 0000000..ac8f388
--- /dev/null
+++ b/test-data/unit/check-kwargs.test
@@ -0,0 +1,339 @@
+-- Test cases for keyword arguments.
+
+
+[case testTypeErrorInKeywordArgument]
+import typing
+def f(o: object) -> None: pass
+f(o=None()) # E: None not callable
+
+[case testSimpleKeywordArgument]
+import typing
+def f(a: 'A') -> None: pass
+f(a=A())
+f(a=object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+class A: pass
+
+[case testTwoKeywordArgumentsNotInOrder]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(b=A(), a=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+f(b=B(), a=B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(a=A(), b=B())
+f(b=B(), a=A())
+class A: pass
+class B: pass
+
+[case testOneOfSeveralOptionalKeywordArguments]
+import typing
+def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass
+f(a=A())
+f(b=B())
+f(c=C())
+f(b=B(), c=C())
+f(a=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(b=A()) # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+f(c=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "C"
+f(b=B(), c=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "C"
+class A: pass
+class B: pass
+class C: pass
+
+[case testBothPositionalAndKeywordArguments]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+f(A(), b=B())
+class A: pass
+class B: pass
+
+[case testContextSensitiveTypeInferenceForKeywordArg]
+from typing import List
+def f(a: 'A', b: 'List[A]') -> None: pass
+f(b=[], a=A())
+class A: pass
+[builtins fixtures/list.pyi]
+
+[case testGivingSameKeywordArgumentTwice]
+import typing
+def f(a: 'A', b: 'B') -> None: pass
+f(a=A(), b=B(), a=A()) # E: keyword argument repeated
+class A: pass
+class B: pass
+
+[case testGivingArgumentAsPositionalAndKeywordArg]
+import typing
+def f(a: 'A', b: 'B' = None) -> None: pass
+f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
+class A: pass
+class B: pass
+
+[case testGivingArgumentAsPositionalAndKeywordArg2]
+import typing
+def f(a: 'A' = None, b: 'B' = None) -> None: pass
+f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a"
+class A: pass
+class B: pass
+
+[case testPositionalAndKeywordForSameArg]
+# This used to crash in check_argument_count(). See #1095.
+def f(a: int): pass
+def g(): f(0, a=1)
+[out]
+
+[case testInvalidKeywordArgument]
+import typing
+def f(a: 'A') -> None: pass # N: "f" defined here
+f(b=object()) # E: Unexpected keyword argument "b" for "f"
+class A: pass
+
+[case testKeywordArgumentsWithDynamicallyTypedCallable]
+from typing import Any
+f = None # type: Any
+f(x=f(), z=None()) # E: None not callable
+f(f, zz=None()) # E: None not callable
+f(x=None)
+
+[case testKeywordArgumentWithFunctionObject]
+from typing import Callable
+f = None # type: Callable[[A, B], None]
+f(a=A(), b=B())
+f(A(), b=B())
+class A: pass
+class B: pass
+[out]
+main:3: error: Unexpected keyword argument "a"
+main:3: error: Unexpected keyword argument "b"
+main:4: error: Unexpected keyword argument "b"
+
+[case testKeywordOnlyArguments]
+import typing
+def f(a: 'A', *, b: 'B' = None) -> None: pass
+def g(a: 'A', *, b: 'B') -> None: pass
+def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
+def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
+f(A(), b=B())
+f(b=B(), a=A())
+f(A())
+f(A(), B()) # E: Too many positional arguments for "f"
+g(A(), b=B())
+g(b=B(), a=A())
+g(A()) # E: Missing named argument "b" for function "g"
+g(A(), B()) # E: Too many positional arguments for "g"
+h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
+h(A(), b=B()) # E: Missing named argument "aa" for function "h"
+h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A(), b=B(), aa=A())
+h(A(), aa=A(), b=B())
+i(A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B())
+i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B(), aa=A())
+i(A(), aa=A(), b=B())
+
+class A: pass
+class B: pass
+
+[case testKeywordOnlyArgumentsFastparse]
+# flags: --fast-parser
+import typing
+def f(a: 'A', *, b: 'B' = None) -> None: pass
+def g(a: 'A', *, b: 'B') -> None: pass
+def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass
+def i(a: 'A', *, b: 'B', aa: 'A' = None) -> None: pass
+f(A(), b=B())
+f(b=B(), a=A())
+f(A())
+f(A(), B()) # E: Too many positional arguments for "f"
+g(A(), b=B())
+g(b=B(), a=A())
+g(A()) # E: Missing named argument "b" for function "g"
+g(A(), B()) # E: Too many positional arguments for "g"
+h(A()) # E: Missing named argument "b" for function "h" # E: Missing named argument "aa" for function "h"
+h(A(), b=B()) # E: Missing named argument "aa" for function "h"
+h(A(), aa=A()) # E: Missing named argument "b" for function "h"
+h(A(), b=B(), aa=A())
+h(A(), aa=A(), b=B())
+i(A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B())
+i(A(), aa=A()) # E: Missing named argument "b" for function "i"
+i(A(), b=B(), aa=A())
+i(A(), aa=A(), b=B())
+
+class A: pass
+class B: pass
+
+[case testKwargsAfterBareArgs]
+from typing import Tuple, Any
+def f(a, *, b=None) -> None: pass
+a = None  # type: Any
+b = None  # type: Any
+f(a, **b)
+
+[builtins fixtures/dict.pyi]
+
+[case testKeywordArgAfterVarArgs]
+import typing
+def f(*a: 'A', b: 'B' = None) -> None: pass
+f()
+f(A())
+f(A(), A())
+f(b=B())
+f(A(), b=B())
+f(A(), A(), b=B())
+f(B())      # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(b=A())    # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs]
+from typing import List
+def f(*a: 'A', b: 'B' = None) -> None: pass
+a = None # type: List[A]
+f(*a)
+f(A(), *a)
+f(b=B())
+f(*a, b=B())
+f(A(), *a, b=B())
+f(A(), B())   # E: Argument 2 to "f" has incompatible type "B"; expected "A"
+f(A(), b=A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+f(*a, b=A())  # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingDynamicallyTypedFunctionWithKeywordArgs]
+import typing
+def f(x, y=A()): pass
+f(x=A(), y=A())
+f(y=A(), x=A())
+f(y=A())      # E: Missing positional argument "x" in call to "f"
+f(A(), z=A()) # E: Unexpected keyword argument "z" for "f"
+class A: pass
+
+[case testKwargsArgumentInFunctionBody]
+from typing import Dict, Any
+def f( **kwargs: 'A') -> None:
+    d1 = kwargs # type: Dict[str, A]
+    d2 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[A, Any])
+    d3 = kwargs # type: Dict[Any, str] # E: Incompatible types in assignment (expression has type Dict[str, A], variable has type Dict[Any, str])
+class A: pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testKwargsArgumentInFunctionBodyWithImplicitAny]
+from typing import Dict, Any
+def f(**kwargs) -> None:
+    d1 = kwargs # type: Dict[str, A]
+    d2 = kwargs # type: Dict[str, str]
+    d3 = kwargs # type: Dict[A, Any] # E: Incompatible types in assignment (expression has type Dict[str, Any], variable has type Dict[A, Any])
+class A: pass
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testCallingFunctionThatAcceptsVarKwargs]
+import typing
+def f( **kwargs: 'A') -> None: pass
+f()
+f(x=A())
+f(y=A(), z=A())
+f(x=B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(A())   # E: Too many arguments for "f"
+# Perhaps a better message would be "Too many *positional* arguments..."
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testCallingFunctionWithKeywordVarArgs]
+from typing import Dict
+def f( **kwargs: 'A') -> None: pass
+d = None # type: Dict[str, A]
+f(**d)
+f(x=A(), **d)
+d2 = None # type: Dict[str, B]
+f(**d2)        # E: Argument 1 to "f" has incompatible type **Dict[str, B]; expected "A"
+f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type **Dict[str, B]; expected "A"
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testInvalidTypeForKeywordVarArg]
+from typing import Dict
+def f( **kwargs: 'A') -> None: pass
+d = None # type: Dict[A, A]
+f(**d)         # E: Keywords must be strings
+f(**A())       # E: Argument after ** must be a dictionary
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testPassingKeywordVarArgsToNonVarArgsFunction]
+from typing import Any, Dict
+def f(a: 'A', b: 'B') -> None: pass
+d = None # type: Dict[str, Any]
+f(**d)
+d2 = None # type: Dict[str, A]
+f(**d2) # E: Argument 1 to "f" has incompatible type **Dict[str, A]; expected "B"
+class A: pass
+class B: pass
+[builtins fixtures/dict.pyi]
+
+[case testBothKindsOfVarArgs]
+from typing import Any, List, Dict
+def f(a: 'A', b: 'A') -> None: pass
+l = None # type: List[Any]
+d = None # type: Dict[Any, Any]
+f(*l, **d)
+class A: pass
+[builtins fixtures/dict.pyi]
+
+[case testKeywordArgumentAndCommentSignature]
+import typing
+def f(x): # type: (int) -> str # N: "f" defined here
+    pass
+f(x='') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(x=0)
+f(y=0) # E: Unexpected keyword argument "y" for "f"
+
+[case testKeywordArgumentAndCommentSignature2]
+import typing
+class A:
+    def f(self, x): # type: (int) -> str
+        pass
+A().f(x='') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+A().f(x=0)
+A().f(y=0) # E: Unexpected keyword argument "y" for "f" of "A"
+
+[case testKeywordVarArgsAndCommentSignature]
+import typing
+def f(**kwargs): # type: (**int) -> None
+    pass
+f(z=1)
+f(x=1, y=1)
+f(x='', y=1) # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(x=1, y='') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCallsWithStars]
+def f(a: int) -> None:
+    pass
+
+s = ('',)
+f(*s) # E: Argument 1 to "f" has incompatible type *"Tuple[str]"; expected "int"
+
+a = {'': 0}
+f(a) # E: Argument 1 to "f" has incompatible type Dict[str, int]; expected "int"
+f(**a) # okay
+
+b = {'': ''}
+f(b) # E: Argument 1 to "f" has incompatible type Dict[str, str]; expected "int"
+f(**b) # E: Argument 1 to "f" has incompatible type **Dict[str, str]; expected "int"
+
+c = {0: 0}
+f(**c) # E: Keywords must be strings
+[builtins fixtures/dict.pyi]
+
+[case testCallStar2WithStar]
+def f(**k): pass
+f(*(1, 2))  # E: Too many arguments for "f"
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test
new file mode 100644
index 0000000..c9c67e8
--- /dev/null
+++ b/test-data/unit/check-lists.test
@@ -0,0 +1,72 @@
+-- Nested list assignment
+-- -----------------------------
+
+[case testNestedListAssignment]
+from typing import List
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+
+a1, [b1, c1] = a2, [b2, c2]
+a1, [a1, [b1, c1]] = a2, [a2, [b2, c2]]
+a1, [a1, [a1, b1]] = a1, [a1, [a1, c1]]  # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNestedListAssignmentToTuple]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+
+a, b = [a, b]
+a, b = [a]  # E: Need more than 1 value to unpack (2 expected)
+a, b = [a, b, c]  # E: Too many values to unpack (2 expected, 3 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListAssignmentFromTuple]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+t = a, b
+
+[a, b], c = t, c
+[a, c], c = t, c  # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+[a, a, a], c = t, c  # E: Need more than 2 values to unpack (3 expected)
+[a], c = t, c  # E: Too many values to unpack (1 expected, 2 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListAssignmentUnequalAmountToUnpack]
+from typing import List
+a, b, c = None, None, None # type: (A, B, C)
+
+def f() -> None: # needed because test parser tries to parse [a, b] as section header
+    [a, b] = [a, b]
+    [a, b] = [a]  # E: Need more than 1 value to unpack (2 expected)
+    [a, b] = [a, b, c]  # E: Too many values to unpack (2 expected, 3 provided)
+
+class A: pass
+class B: pass
+class C: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testListWithStarExpr]
+(x, *a) = [1, 2, 3]
+a = [1, *[2, 3]]
+reveal_type(a)  # E: Revealed type is 'builtins.list[builtins.int]'
+b = [0, *a]
+reveal_type(b)  # E: Revealed type is 'builtins.list[builtins.int*]'
+c = [*a, 0]
+reveal_type(c)  # E: Revealed type is 'builtins.list[builtins.int*]'
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
new file mode 100644
index 0000000..2444ce7
--- /dev/null
+++ b/test-data/unit/check-modules.test
@@ -0,0 +1,1408 @@
+-- Type checker test cases dealing with modules and imports.
+
+[case testAccessImportedDefinitions]
+import m
+import typing
+m.f()           # E: Too few arguments for "f"
+m.f(object())   # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+m.x = object()  # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+m.f(m.A())
+m.x = m.A()
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+x = A()
+
+[case testAccessImportedDefinitions]
+import m
+import typing
+m.f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+m.f(m.A())
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+
+[case testAccessImportedDefinitions2]
+from m import f, A
+import typing
+f(object()) # E: Argument 1 to "f" has incompatible type "object"; expected "A"
+f(A())
+[file m.py]
+class A: pass
+def f(a: A) -> None: pass
+
+[case testImportedExceptionType]
+import m
+import typing
+try:
+    pass
+except m.Err:
+    pass
+except m.Bad: # E: Exception type must be derived from BaseException
+    pass
+[file m.py]
+class Err(BaseException): pass
+class Bad: pass
+[builtins fixtures/exception.pyi]
+
+[case testImportedExceptionType2]
+from m import Err, Bad
+import typing
+try:
+    pass
+except Err:
+    pass
+except Bad: # E: Exception type must be derived from BaseException
+    pass
+[file m.py]
+class Err(BaseException): pass
+class Bad: pass
+[builtins fixtures/exception.pyi]
+
+[case testImportWithinBlock]
+import typing
+if 1:
+    import m
+    m.a = m.b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    m.a = m.a
+    m.f()
+    m.f(m.a)    # E: Too many arguments for "f"
+    m.a = m.A()
+    m.a = m.B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+
+[case testImportWithinFunction]
+import typing
+def f() -> None:
+    from m import a, b, f, A, B
+    a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = a
+    f()
+    f(a)    # E: Too many arguments for "f"
+    a = A()
+    a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+[out]
+
+[case testImportWithinMethod]
+import typing
+class C:
+    def f(self) -> None:
+        from m import *
+        a = b   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+        a = a
+        f()
+        f(a)    # E: Too many arguments for "f"
+        a = A()
+        a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m.py]
+class A: pass
+class B: pass
+a = A()
+b = B()
+def f() -> None: pass
+[out]
+
+[case testImportWithinClassBody]
+import typing
+class C:
+    import m
+    m.f()
+    m.f(C) # E: Too many arguments for "f"
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testImportWithinClassBody2]
+import typing
+class C:
+    from m import f
+    f()
+    f(C) # E: Too many arguments for "f"
+[file m.py]
+def f() -> None: pass
+[out]
+
+[case testImportWithStub]
+import _m
+_m.f("hola")
+[file _m.pyi]
+def f(c:str) -> None: pass
+[out]
+
+[case testImportWithStubIncompatibleType]
+import _m
+_m.f("hola")
+_m.f(12)  # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file _m.py]
+def f(c):
+  print(c)
+[file _m.pyi]
+def f(c:str) -> None: pass
+
+[case testInvalidOperationsOnModules]
+import m
+import typing
+class A: pass
+m()      # E: "module" not callable
+a = m # type: A  # E: Incompatible types in assignment (expression has type "module", variable has type "A")
+m + None # E: Unsupported left operand type for + ("module")
+[file m.py]
+[builtins fixtures/module.pyi]
+
+[case testNameDefinedInDifferentModule]
+import m, n
+import typing
+m.x # E: "module" has no attribute "x"
+[file m.py]
+y = object()
+[file n.py]
+x = object()
+[builtins fixtures/module.pyi]
+
+[case testChainedAssignmentAndImports]
+import m
+
+i, s = None, None # type: (int, str)
+i = m.x
+i = m.y
+s = m.x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+s = m.y # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[file m.py]
+x = y = 1
+[builtins fixtures/primitives.pyi]
+
+[case testConditionalFunctionDefinitionAndImports]
+import m
+import typing
+m.f(1)
+m.f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+x = object()
+if x:
+    def f(x: int) -> None: pass
+else:
+    def f(x: int) -> None: pass
+
+[case testTypeCheckWithUnknownModule]
+import nonexistent
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModule2]
+import m, nonexistent
+None + ''
+m.x = 1
+m.x = ''
+[file m.py]
+x = 1
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTypeCheckWithUnknownModule3]
+import nonexistent, m
+None + ''
+m.x = 1
+m.x = ''
+[file m.py]
+x = 1
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTypeCheckWithUnknownModule4]
+import nonexistent, another
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:1: error: Cannot find module named 'another'
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModule5]
+import nonexistent as x
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModuleUsingFromImport]
+from nonexistent import x
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckWithUnknownModuleUsingImportStar]
+from nonexistent import *
+None + ''
+[out]
+main:1: error: Cannot find module named 'nonexistent'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Unsupported left operand type for + (None)
+
+[case testAccessingUnknownModule]
+import xyz
+xyz.foo()
+xyz()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingUnknownModule2]
+import xyz, bar
+xyz.foo()
+bar()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:1: error: Cannot find module named 'bar'
+
+[case testAccessingUnknownModule3]
+import xyz as z
+xyz.foo()
+z()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'xyz' is not defined
+
+[case testAccessingNameImportedFromUnknownModule]
+from xyz import y, z
+y.foo()
+z()
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingNameImportedFromUnknownModule2]
+from xyz import *
+y
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'y' is not defined
+
+[case testAccessingNameImportedFromUnknownModule3]
+from xyz import y as z
+y
+z
+[out]
+main:1: error: Cannot find module named 'xyz'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'y' is not defined
+
+[case testUnknownModuleRedefinition]
+import xab
+def xab(): pass
+[out]
+main:1: error: Cannot find module named 'xab'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testAccessingUnknownModuleFromOtherModule]
+import x
+x.nonexistent.foo
+x.z
+[file x.py]
+import nonexistent
+[builtins fixtures/module.pyi]
+[out]
+tmp/x.py:1: error: Cannot find module named 'nonexistent'
+tmp/x.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:3: error: "module" has no attribute "z"
+
+[case testUnknownModuleImportedWithinFunction]
+def f():
+    import foobar
+def foobar(): pass
+foobar('')
+[out]
+main:2: error: Cannot find module named 'foobar'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: Too many arguments for "foobar"
+
+[case testUnknownModuleImportedWithinFunction2]
+def f():
+    from foobar import x
+def x(): pass
+x('')
+[out]
+main:2: error: Cannot find module named 'foobar'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: Too many arguments for "x"
+
+[case testRelativeImports]
+import typing
+import m.a
+m.a.x = m.a.y # Error
+[file m/__init__.py]
+[file m/a.py]
+import typing
+from .b import A, B, x, y
+z = x
+z = y # Error
+[file m/b.py]
+import typing
+class A: pass
+class B: pass
+x = A()
+y = B()
+[out]
+tmp/m/a.py:4: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testRelativeImports2]
+import typing
+import m.a
+m.a.x = m.a.y # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[file m/__init__.py]
+[file m/a.py]
+import typing
+from .b import A, B, x, y
+[file m/b.py]
+import typing
+class A: pass
+class B: pass
+x = A()
+y = B()
+
+[case testExportedValuesInImportAll]
+import typing
+from m import *
+_ = a
+_ = b
+_ = c
+_ = d
+_ = e
+_ = f # E: Name 'f' is not defined
+_ = _g # E: Name '_g' is not defined
+[file m.py]
+__all__ = ['a']
+__all__ += ('b',)
+__all__.append('c')
+__all__.extend(('d', 'e'))
+
+a = b = c = d = e = f = _g = 1
+[builtins fixtures/module_all.pyi]
+
+[case testAllMustBeSequenceStr]
+import typing
+__all__ = [1, 2, 3]
+[builtins fixtures/module_all.pyi]
+[out]
+main:2: error: Type of __all__ must be Sequence[str], not List[int]
+
+[case testAllMustBeSequenceStr_python2]
+import typing
+__all__ = [1, 2, 3]
+[builtins_py2 fixtures/module_all_python2.pyi]
+[out]
+main:2: error: Type of __all__ must be Sequence[unicode], not List[int]
+
+[case testAllUnicodeSequenceOK_python2]
+import typing
+__all__ = [u'a', u'b', u'c']
+[builtins_py2 fixtures/module_all_python2.pyi]
+
+[out]
+
+[case testEllipsisInitializerInStubFileWithType]
+import m
+m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[file m.pyi]
+x = ... # type: int
+
+[case testEllipsisInitializerInStubFileWithoutType]
+import m
+m.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "ellipsis")
+[file m.pyi]
+# Ellipsis is only special with a # type: comment (not sure though if this is great)
+x = ...
+
+[case testEllipsisInitializerInModule]
+x = ... # type: int # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+
+[case testEllipsisDefaultArgValueInStub]
+import m
+m.f(1)
+m.f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.pyi]
+def f(x: int = ...) -> None: pass
+
+[case testEllipsisDefaultArgValueInStub2]
+import m
+def f(x: int = ...) -> None: pass
+[file m.pyi]
+def g(x: int = '') -> None: pass
+[out]
+tmp/m.pyi:1: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:2: error: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+
+[case testEllipsisDefaultArgValueInNonStub]
+def f(x: int = ...) -> None: pass # E: Incompatible types in assignment (expression has type "ellipsis", variable has type "int")
+[out]
+
+[case testStarImportOverlapping]
+from m1 import *
+from m2 import *
+j = ''
+[file m1.py]
+x = 1
+[file m2.py]
+x = 1
+
+[case testStarImportOverlappingMismatch]
+from m1 import *
+from m2 import * # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
+j = ''
+[file m1.py]
+x = ''
+[file m2.py]
+x = 1
+
+[case testStarImportOverridingLocalImports-skip]
+from m1 import *
+from m2 import *
+x = '' # E: TODO (cannot assign str to int)
+[file m1.py]
+x = 1
+[file m2.py]
+x = 1
+
+[case testAssignToFuncDefViaImport]
+from m import *  # E: Incompatible import of "x" (imported name has type "int", local name has type "str")
+f = None # E: Need type annotation for variable
+x = ''
+[file m.py]
+def f(): pass
+x = 1+0
+[out]
+
+
+-- Conditional definitions and function redefinitions via module object
+-- --------------------------------------------------------------------
+
+
+[case testConditionalImportAndAssign]
+try:
+    from m import x
+except:
+    x = None
+try:
+    from m import x as y
+except:
+    y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[file m.py]
+x = ''
+
+[case testAssignAndConditionalImport]
+x = ''
+try:
+    from m import x
+except:
+    pass
+y = 1
+try:
+    from m import x as y  # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
+except:
+    pass
+[file m.py]
+x = ''
+
+[case testAssignAndConditionalStarImport]
+x = ''
+y = 1
+try:
+    from m import * # E: Incompatible import of "y" (imported name has type "str", local name has type "int")
+except:
+    pass
+[file m.py]
+x = ''
+y = ''
+
+[case testRedefineImportedFunctionViaImport]
+try:
+    from m import f, g
+except:
+    def f(x): pass
+    def g(x): pass # E: All conditional function variants must have identical signatures
+[file m.py]
+def f(x): pass
+def g(x, y): pass
+
+[case testImportedVariableViaImport]
+try:
+    from m import x
+except:
+    from n import x # E: Incompatible import of "x" (imported name has type "str", local name has type "int")
+[file m.py]
+x = 1
+[file n.py]
+x = ''
+
+[case testRedefineFunctionViaImport]
+def f(x): pass
+def g(x): pass
+try:
+    from m import f, g # E: Incompatible import of "g" (imported name has type Callable[[Any, Any], Any], local name has type Callable[[Any], Any])
+except:
+    pass
+[file m.py]
+def f(x): pass
+def g(x, y): pass
+
+[case testImportVariableAndAssignNone]
+try:
+    from m import x
+except:
+    x = None
+[file m.py]
+x = 1
+
+[case testImportFunctionAndAssignNone]
+try:
+    from m import f
+except:
+    f = None
+[file m.py]
+def f(): pass
+
+[case testImportFunctionAndAssignFunction]
+def g(x): pass
+try:
+    from m import f
+except:
+    f = g
+[file m.py]
+def f(x): pass
+
+[case testImportFunctionAndAssignIncompatible]
+try:
+    from m import f
+except:
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+
+[case testAssignToFuncDefViaGlobalDecl2]
+import typing
+from m import f
+def g() -> None:
+    global f
+    f = None
+    f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+[out]
+
+[case testAssignToFuncDefViaNestedModules]
+import m.n
+m.n.f = None
+m.n.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m/__init__.py]
+[file m/n.py]
+def f(): pass
+[out]
+
+[case testAssignToFuncDefViaModule]
+import m
+m.f = None
+m.f = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Callable[[], Any])
+[file m.py]
+def f(): pass
+[out]
+
+[case testConditionalImportAndAssignNoneToModule]
+if object():
+    import m
+else:
+    m = None
+m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file m.py]
+def f(x: str) -> None: pass
+[builtins fixtures/module.pyi]
+[out]
+
+[case testConditionalImportAndAssignInvalidToModule]
+if object():
+    import m
+else:
+    m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "module")
+[file m.py]
+[builtins fixtures/module.pyi]
+[out]
+
+[case testImportAndAssignToModule]
+import m
+m = None
+m.f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[file m.py]
+def f(x: str) -> None: pass
+[builtins fixtures/module.pyi]
+[out]
+
+
+-- Test cases that simulate 'mypy -m modname'
+--
+-- The module name to import is encoded in a comment.
+
+[case testTypeCheckNamedModule]
+# cmd: mypy -m m.a
+[file m/__init__.py]
+None + 1
+[file m/a.py]
+[out]
+tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule2]
+# cmd: mypy -m m.a
+[file m/__init__.py]
+[file m/a.py]
+None + 1
+[out]
+tmp/m/a.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule3]
+# cmd: mypy -m m
+[file m/__init__.py]
+None + 1
+[file m/a.py]
+[out]
+tmp/m/__init__.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModule4]
+# cmd: mypy -m m
+[file m/__init__.py]
+[file m/a.py]
+None + 1  # Not analyzed.
+[out]
+
+[case testTypeCheckNamedModule5]
+# cmd: mypy -m m
+None + ''  # Not analyzed.
+[file m.py]
+None + 1
+[out]
+tmp/m.py:1: error: Unsupported left operand type for + (None)
+
+[case testTypeCheckNamedModuleWithImportCycle]
+# cmd: mypy -m m.a
+None + 1  # Does not generate error, as this file won't be analyzed.
+[file m/__init__.py]
+import m.a
+[file m/a.py]
+[out]
+
+
+-- Checks dealing with submodules and different kinds of imports
+-- -------------------------------------------------------------
+
+[case testSubmoduleRegularImportAddsAllParents]
+import a.b.c
+reveal_type(a.value)  # E: Revealed type is 'builtins.int'
+reveal_type(a.b.value)  # E: Revealed type is 'builtins.str'
+reveal_type(a.b.c.value)  # E: Revealed type is 'builtins.float'
+b.value  # E: Name 'b' is not defined
+c.value  # E: Name 'c' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleImportAsDoesNotAddParents]
+import a.b.c as foo
+reveal_type(foo.value)  # E: Revealed type is 'builtins.float'
+a.value  # E: Name 'a' is not defined
+b.value  # E: Name 'b' is not defined
+c.value  # E: Name 'c' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleImportFromDoesNotAddParents]
+from a import b
+reveal_type(b.value)  # E: Revealed type is 'builtins.str'
+b.c.value  # E: "module" has no attribute "c"
+a.value  # E: Name 'a' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[builtins fixtures/module.pyi]
+[out]
+
+[case testSubmoduleImportFromDoesNotAddParents2]
+from a.b import c
+reveal_type(c.value)  # E: Revealed type is 'builtins.float'
+a.value  # E: Name 'a' is not defined
+b.value  # E: Name 'b' is not defined
+
+[file a/__init__.py]
+value = 3
+[file a/b/__init__.py]
+value = "a"
+[file a/b/c.py]
+value = 3.2
+[out]
+
+[case testSubmoduleRegularImportNotDirectlyAddedToParent]
+import a.b.c
+def accept_float(x: float) -> None: pass
+accept_float(a.b.c.value)
+
+[file a/__init__.py]
+value = 3
+b.value
+a.b.value
+
+[file a/b/__init__.py]
+value = "a"
+c.value
+a.b.c.value
+
+[file a/b/c.py]
+value = 3.2
+[out]
+tmp/a/b/__init__.py:2: error: Name 'c' is not defined
+tmp/a/b/__init__.py:3: error: Name 'a' is not defined
+tmp/a/__init__.py:2: error: Name 'b' is not defined
+tmp/a/__init__.py:3: error: Name 'a' is not defined
+
+[case testSubmoduleMixingLocalAndQualifiedNames]
+from a.b import MyClass
+val1 = None  # type: a.b.MyClass  # E: Name 'a' is not defined
+val2 = None  # type: MyClass
+
+[file a/__init__.py]
+[file a/b.py]
+class MyClass: pass
+[out]
+
+[case testSubmoduleMixingImportFrom]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/child.py]
+from parent.common import SomeClass
+from parent import common
+foo = parent.common.SomeClass()
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:3: error: Name 'parent' is not defined
+
+[case testSubmoduleMixingImportFromAndImport]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/unrelated.py]
+class ShouldNotLoad: pass
+
+[file parent/child.py]
+from parent.common import SomeClass
+import parent
+
+# Note, since this might be unintuitive -- when `parent.common` is loaded in any way,
+# shape, or form, it's added to `parent`'s namespace, which is why the below line
+# succeeds.
+foo = parent.common.SomeClass()
+reveal_type(foo)
+bar = parent.unrelated.ShouldNotLoad()
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:8: error: Revealed type is 'parent.common.SomeClass'
+tmp/parent/child.py:9: error: "module" has no attribute "unrelated"
+
+[case testSubmoduleMixingImportFromAndImport2]
+import parent.child
+
+[file parent/__init__.py]
+
+[file parent/common.py]
+class SomeClass: pass
+
+[file parent/child.py]
+from parent import common
+import parent
+foo = parent.common.SomeClass()
+reveal_type(foo)
+
+[builtins fixtures/module.pyi]
+[out]
+tmp/parent/child.py:4: error: Revealed type is 'parent.common.SomeClass'
+
+-- Tests repeated imports
+
+[case testIdenticalImportFromTwice]
+from a import x, y, z
+from b import x, y, z
+[file a.py]
+from common import x, y, z
+[file b.py]
+from common import x, y, z
+[file common.py]
+x = 3
+def y() -> int: return 3
+class z: pass
+[out]
+
+[case testIdenticalImportStarTwice]
+from a import *
+from b import *
+[file a.py]
+from common import x, y, z
+[file b.py]
+from common import x, y, z
+[file common.py]
+x = 3
+def y() -> int: return 3
+class z: pass
+[out]
+
+[case testDifferentImportSameNameTwice]
+from a import x, y, z
+from b import x, y, z
+[file a.py]
+x = 3
+def y() -> int: return 1
+class z: pass
+[file b.py]
+x = "foo"
+def y() -> str: return "foo"
+class z: pass
+[out]
+main:2: error: Incompatible import of "x" (imported name has type "str", local name has type "int")
+main:2: error: Incompatible import of "y" (imported name has type Callable[[], str], local name has type Callable[[], int])
+main:2: error: Incompatible import of "z" (imported name has type "z" (type object), local name has type "z" (type object))
+
+-- Misc
+
+[case testInheritFromBadImport]
+# cmd: mypy -m bar
+[file foo.py]
+pass
+[file bar.py]
+from foo import B
+class C(B):
+    pass
+[out]
+tmp/bar.py:1: error: Module 'foo' has no attribute 'B'
+
+[case testImportSuppressedWhileAlmostSilent]
+# cmd: mypy -m main
+# flags: --follow-imports=error
+[file main.py]
+import mod
+[file mod.py]
+[builtins fixtures/module.pyi]
+[out]
+tmp/main.py:1: note: Import of 'mod' ignored
+tmp/main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+
+[case testAncestorSuppressedWhileAlmostSilent]
+# cmd: mypy -m foo.bar
+# flags: --follow-imports=error
+[file foo/bar.py]
+[file foo/__init__.py]
+[builtins fixtures/module.pyi]
+[out]
+tmp/foo/bar.py: note: Ancestor package 'foo' ignored
+tmp/foo/bar.py: note: (Using --follow-imports=error, submodule passed on command line)
+
+[case testStubImportNonStubWhileSilent]
+# cmd: mypy -m main
+# flags: --follow-imports=skip
+[file main.py]
+from stub import x # Permitted
+from other import y # Disallowed
+x + '' # Error here
+y + '' # But not here
+[file stub.pyi]
+from non_stub import x
+[file non_stub.py]
+x = 42
+[file other.py]
+y = 42
+[builtins fixtures/module.pyi]
+[out]
+tmp/main.py:3: error: Unsupported left operand type for + ("int")
+
+[case testSilentSubmoduleImport]
+# cmd: mypy -m foo
+# flags: --follow-imports=skip
+[file foo/__init__.py]
+from foo import bar
+[file foo/bar.py]
+pass
+
+[case testSuperclassInImportCycle]
+import a
+import d
+a.A().f(d.D())
+[file a.py]
+if 0:
+    import d
+class B: pass
+class C(B): pass
+class A:
+    def f(self, x: B) -> None: pass
+[file d.py]
+import a
+class D(a.C): pass
+
+[case testSuperclassInImportCycleReversedImports]
+import d
+import a
+a.A().f(d.D())
+[file a.py]
+if 0:
+    import d
+class B: pass
+class C(B): pass
+class A:
+    def f(self, x: B) -> None: pass
+[file d.py]
+import a
+class D(a.C): pass
+
+[case testPreferPackageOverFile]
+import a
+[file a.py]
+/  # intentional syntax error -- this file shouldn't be parsed
+[file a/__init__.py]
+pass
+[out]
+
+[case testPreferPackageOverFile2]
+from a import x
+[file a.py]
+/  # intentional syntax error -- this file shouldn't be parsed
+[file a/__init__.py]
+x = 0
+[out]
+
+[case testImportInClass]
+class C:
+    import foo
+reveal_type(C.foo.bar)  # E: Revealed type is 'builtins.int'
+[file foo.py]
+bar = 0
+[builtins fixtures/module.pyi]
+[out]
+
+[case testIfFalseImport]
+if False:
+    import a
+def f(x: 'a.A') -> int:
+    return x.f()
+[file a.py]
+class A:
+    def f(self) -> int:
+        return 0
+[builtins fixtures/bool.pyi]
+
+
+-- Test stability under import cycles
+-- ----------------------------------
+
+-- The first two tests are identical except one main has 'import x'
+-- and the other 'import y'.  Previously (before build.order_ascc()
+-- was added) one of these would fail because the imports were
+-- processed in the (reverse) order in which the files were
+-- encountered.
+
+[case testImportCycleStability1]
+import x
+[file x.py]
+def f() -> str: return ''
+class Base:
+    attr = f()
+def foo():
+    import y
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = x.Base.attr
+[out]
+
+[case testImportCycleStability2]
+import y
+[file x.py]
+def f() -> str: return ''
+class Base:
+    attr = f()
+def foo():
+    import y
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = x.Base.attr
+[out]
+
+-- This case isn't fixed by order_ascc(), but is fixed by the
+-- lightweight type inference added to semanal.py
+-- (analyze_simple_literal_type()).
+
+[case testImportCycleStability3]
+import y
+[file x.py]
+class Base:
+    pass
+def foo() -> int:
+    import y
+    reveal_type(y.Sub.attr)
+    return y.Sub.attr
+[file y.py]
+import x
+class Sub(x.Base):
+    attr = 0
+[out]
+tmp/x.py:5: error: Revealed type is 'builtins.int'
+
+-- This case has a symmetrical cycle, so it doesn't matter in what
+-- order the files are processed.  It depends on the lightweight type
+-- interference.
+
+[case testImportCycleStability4]
+import x
+[file x.py]
+import y
+class C:
+    attr = ''
+def foo() -> int:
+    return y.D.attr
+[file y.py]
+import x
+class D:
+    attr = 0
+def bar() -> str:
+    return x.C.attr
+
+-- These cases test all supported literal types.
+
+[case testImportCycleStability5]
+import y
+[file x.py]
+class Base:
+    pass
+def foo() -> None:
+    import y
+    i = y.Sub.iattr  # type: int
+    f = y.Sub.fattr  # type: float
+    s = y.Sub.sattr  # type: str
+    b = y.Sub.battr  # type: bytes
+[file y.py]
+import x
+class Sub(x.Base):
+    iattr = 0
+    fattr = 0.0
+    sattr = ''
+    battr = b''
+[out]
+
+[case testImportCycleStability6_python2]
+import y
+[file x.py]
+class Base:
+    pass
+def foo():
+    # type: () -> None
+    import y
+    i = y.Sub.iattr  # type: int
+    f = y.Sub.fattr  # type: float
+    s = y.Sub.sattr  # type: str
+    u = y.Sub.uattr  # type: unicode
+[file y.py]
+import x
+class Sub(x.Base):
+    iattr = 0
+    fattr = 0.0
+    sattr = ''
+    uattr = u''
+[out]
+
+-- This case tests module-level variables.
+
+[case testImportCycleStability7]
+import x
+[file x.py]
+def foo() -> int:
+    import y
+    reveal_type(y.value)
+    return y.value
+[file y.py]
+import x
+value = 12
+[out]
+tmp/x.py:3: error: Revealed type is 'builtins.int'
+
+-- This is not really cycle-related but still about the lightweight
+-- type checker.
+
+[case testImportCycleStability8]
+x = 1  # type: str
+reveal_type(x)
+[out]
+main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:2: error: Revealed type is 'builtins.str'
+
+-- Tests for cross-module second_pass checking.
+
+[case testSymmetricImportCycle1]
+import a
+[file a.py]
+import b
+def f() -> int:
+    return b.x
+y = 0 + 0
+[file b.py]
+import a
+def g() -> int:
+    reveal_type(a.y)
+    return a.y
+x = 1 + 1
+[out]
+tmp/b.py:3: error: Revealed type is 'builtins.int'
+
+[case testSymmetricImportCycle2]
+import b
+[file a.py]
+import b
+def f() -> int:
+    reveal_type(b.x)
+    return b.x
+y = 0 + 0
+[file b.py]
+import a
+def g() -> int:
+    return a.y
+x = 1 + 1
+[out]
+tmp/a.py:3: error: Revealed type is 'builtins.int'
+
+[case testThreePassesRequired]
+import b
+[file a.py]
+import b
+class C:
+    def f1(self) -> None:
+        self.x2
+    def f2(self) -> None:
+        self.x2 = b.b
+[file b.py]
+import a
+b = 1 + 1
+[out]
+tmp/a.py:4: error: Cannot determine type of 'x2'
+
+[case testErrorInPassTwo1]
+import b
+[file a.py]
+import b
+def f() -> None:
+    a = b.x + 1
+    a + ''
+[file b.py]
+import a
+x = 1 + 1
+[out]
+tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testErrorInPassTwo2]
+import a
+[file a.py]
+import b
+def f() -> None:
+    a = b.x + 1
+    a + ''
+[file b.py]
+import a
+x = 1 + 1
+[out]
+tmp/a.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testDeferredDecorator]
+import a
+[file a.py]
+import b
+def g() -> None:
+    f('')
+ at b.deco
+def f(a: str) -> int: pass
+reveal_type(f)
+x = 1 + 1
+[file b.py]
+from typing import Callable, TypeVar
+import a
+T = TypeVar('T')
+def deco(f: Callable[[T], int]) -> Callable[[T], int]:
+    a.x
+    return f
+[out]
+tmp/a.py:6: error: Revealed type is 'def (builtins.str*) -> builtins.int'
+
+[case testDeferredClassContext]
+class A:
+    def f(self) -> str: return 'foo'
+class B(A):
+    def f(self) -> str: return self.x
+    def initialize(self): self.x = 'bar'
+[out]
+
+
+-- Scripts and __main__
+
+[case testScriptsAreModules]
+# flags: --scripts-are-modules
+[file a]
+pass
+[file b]
+pass
+
+[case testScriptsAreNotModules]
+# cmd: mypy a b
+[file a]
+pass
+[file b]
+pass
+[out]
+
+[case testTypeCheckPrio]
+# cmd: mypy -m part1 part2 part3 part4
+
+[file part1.py]
+from part3 import Thing
+class FirstThing: pass
+
+[file part2.py]
+from part4 import part4_thing as Thing
+
+[file part3.py]
+from part2 import Thing
+reveal_type(Thing)
+
+[file part4.py]
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from part1 import FirstThing
+def part4_thing(a: int) -> str: pass
+
+[builtins fixtures/bool.pyi]
+[out]
+tmp/part3.py:2: error: Revealed type is 'def (a: builtins.int) -> builtins.str'
+
+[case testImportStarAliasAnyList]
+import bar
+
+[file bar.py]
+from foo import *
+def bar(y: AnyAlias) -> None:  pass
+
+l = None # type: ListAlias[int]
+reveal_type(l)
+
+[file foo.py]
+from typing import Any, List
+AnyAlias = Any
+ListAlias = List
+[builtins fixtures/list.pyi]
+[out]
+tmp/bar.py:5: error: Revealed type is 'builtins.list[builtins.int]'
+
+[case testImportStarAliasSimpleGeneric]
+from ex2a import *
+
+def do_something(dic: Row) -> None:
+    pass
+
+def do_another() -> Row:
+    return {}
+
+do_something({'good': 'bad'}) # E: List item 0 has incompatible type "Tuple[str, str]"
+reveal_type(do_another()) # E: Revealed type is 'builtins.dict[builtins.str, builtins.int]'
+
+[file ex2a.py]
+from typing import Dict
+Row = Dict[str, int]
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testImportStarAliasGeneric]
+from y import *
+notes = None  # type: G[X]
+another = G[X]()
+second = XT[str]()
+last = XT[G]()
+
+reveal_type(notes) # E: Revealed type is 'y.G[y.G[builtins.int]]'
+reveal_type(another) # E: Revealed type is 'y.G[y.G*[builtins.int]]'
+reveal_type(second) # E: Revealed type is 'y.G[builtins.str*]'
+reveal_type(last) # E: Revealed type is 'y.G[y.G*]'
+
+[file y.py]
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class G(Generic[T]):
+    pass
+
+X = G[int]
+XT = G[T]
+[out]
+
+[case testImportStarAliasCallable]
+from foo import *
+from typing import Any
+
+def bar(x: Any, y: AnyCallable) -> Any:
+    return 'foo'
+
+cb = None # type: AnyCallable
+reveal_type(cb) # E: Revealed type is 'def (*Any, **Any) -> Any'
+
+[file foo.py]
+from typing import Callable, Any
+AnyCallable = Callable[..., Any]
+[out]
diff --git a/test-data/unit/check-multiple-inheritance.test b/test-data/unit/check-multiple-inheritance.test
new file mode 100644
index 0000000..678ccad
--- /dev/null
+++ b/test-data/unit/check-multiple-inheritance.test
@@ -0,0 +1,242 @@
+-- Test cases for multiple inheritance.
+--
+-- Related: check-abstract.test
+
+
+-- No name collisions
+-- ------------------
+
+
+[case testSimpleMultipleInheritanceAndMethods]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def g(self, x: str) -> None: pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+c.g('')
+c.g(1)  # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
+
+[case testSimpleMultipleInheritanceAndMethods2]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def g(self, x): pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+c.g('')
+c.g(1)
+
+[case testSimpleMultipleInheritanceAndInstanceVariables]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.y = ''
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+c.y = ''
+c.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testSimpleMultipleInheritanceAndInstanceVariableInClassBody]
+import typing
+class A:
+    x = 1
+class B:
+    y = ''
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+c.y = ''
+c.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testSimpleMultipleInheritanceAndClassVariable]
+import typing
+class A:
+    x = 1
+class B:
+    y = ''
+class C(A, B): pass
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+C.y = ''
+C.y = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+
+-- Name collisions
+-- ---------------
+
+
+[case testMethodNameCollisionInMultipleInheritanceWithValidSigs]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: int) -> None: pass
+class C(A, B): pass
+c = C()
+c.f(1)
+c.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testInstanceVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.x = 1
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testClassVarNameOverlapInMultipleInheritanceWithCompatibleTypes]
+import typing
+class A:
+    x = 1
+class B:
+    x = 1
+class C(A, B): pass
+c = C()
+c.x = 1
+c.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: str) -> None: pass
+class C(A, B): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodNameCollisionInMultipleInheritanceWithIncompatibleSigs2]
+import typing
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x, y): pass
+class C(A, B): pass
+class D(B, A): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+main:7: error: Definition of "f" in base class "B" is incompatible with definition in base class "A"
+
+
+[case testMethodOverridingWithBothDynamicallyAndStaticallyTypedMethods]
+class A:
+    def f(self) -> int: pass
+class B:
+    def f(self): pass
+class C(B, A): pass
+class D(A, B): pass
+[out]
+
+[case testInstanceVarNameOverlapInMultipleInheritanceWithInvalidTypes]
+import typing
+class A:
+    def f(self) -> None:
+        self.x = 1
+class B:
+    def g(self) -> None:
+        self.x = ''
+class C(A, B): pass
+[out]
+main:8: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
+
+[case testClassVarNameOverlapInMultipleInheritanceWithInvalidTypes]
+import typing
+class A:
+    x = 1
+class B:
+    x = ''
+class C(A, B): pass
+[out]
+main:6: error: Definition of "x" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodOverlapsWithClassVariableInMultipleInheritance]
+from typing import Callable
+class A:
+    def f(self) -> None: pass
+class B:
+    f = ''
+class C(A, B): pass
+[out]
+main:6: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMethodOverlapsWithInstanceVariableInMultipleInheritance]
+from typing import Callable
+class A:
+    def f(self) -> None: pass
+class B:
+    def g(self) -> None:
+        self.f = ''
+class C(A, B): pass
+[out]
+main:7: error: Definition of "f" in base class "A" is incompatible with definition in base class "B"
+
+[case testMultipleInheritanceAndInit]
+import typing
+class A:
+    def __init__(self, x: int) -> None: pass
+class B:
+    def __init__(self) -> None: pass
+class C(A, B): pass
+
+[case testMultipleInheritanceAndDifferentButCompatibleSignatures]
+class A:
+    def clear(self): pass
+
+class B:
+    def clear(self, x=None): pass
+
+class C(B, A): pass
+class D(A, B): pass
+[out]
+main:8: error: Definition of "clear" in base class "A" is incompatible with definition in base class "B"
+
+
+-- Special cases
+-- -------------
+
+
+[case testGenericInheritanceAndOverridingWithMultipleInheritance]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class G(Generic[T]):
+    def f(self, s: int) -> 'G[T]': pass
+class A(G[int]):
+    def f(self, s: int) -> 'A': pass
+class B(A, int): pass
+
+[case testCannotDetermineTypeInMultipleInheritance]
+from typing import Callable, TypeVar
+T = TypeVar('T')
+class A(B, C):
+    def f(self): pass
+class B:
+    @dec
+    def f(self): pass
+class C:
+    @dec
+    def f(self): pass
+def dec(f: Callable[..., T]) -> Callable[..., T]:
+    return f
+[out]
+main:3: error: Cannot determine type of 'f' in base class 'B'
+main:3: error: Cannot determine type of 'f' in base class 'C'
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
new file mode 100644
index 0000000..71a058b
--- /dev/null
+++ b/test-data/unit/check-namedtuple.test
@@ -0,0 +1,429 @@
+[case testNamedTupleUsedAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+a, b = x
+b = x[0]
+a = x[1]
+a, b, c = x # E: Need more than 2 values to unpack (3 expected)
+x[2] # E: Tuple index out of range
+
+[case testNamedTupleWithTupleFieldNamesUsedAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ('x', 'y'))
+x = None  # type: X
+a, b = x
+b = x[0]
+a = x[1]
+a, b, c = x # E: Need more than 2 values to unpack (3 expected)
+x[2] # E: Tuple index out of range
+
+[case testNamedTupleNoUnderscoreFields]
+from collections import namedtuple
+
+X = namedtuple('X', 'x, _y, _z')  # E: namedtuple() field names cannot start with an underscore: _y, _z
+
+[case testNamedTupleAccessingAttributes]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+x.x
+x.y
+x.z # E: "X" has no attribute "z"
+
+
+[case testNamedTupleAttributesAreReadOnly]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+x.x = 5 # E: Property "x" defined in "X" is read-only
+x.y = 5 # E: Property "y" defined in "X" is read-only
+x.z = 5 # E: "X" has no attribute "z"
+
+class A(X): pass
+a = None  # type: A
+a.x = 5 # E: Property "x" defined in "A" is read-only
+a.y = 5 # E: Property "y" defined in "A" is read-only
+-- a.z = 5 # not supported yet
+
+
+[case testNamedTupleCreateWithPositionalArguments]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(1, 'x')
+x.x
+x.z      # E: "X" has no attribute "z"
+x = X(1) # E: Too few arguments for "X"
+x = X(1, 2, 3)  # E: Too many arguments for "X"
+
+[case testCreateNamedTupleWithKeywordArguments]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(x=1, y='x')
+x = X(1, y='x')
+x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X"
+x = X(y=1) # E: Missing positional argument "x" in call to "X"
+
+
+[case testNamedTupleCreateAndUseAsTuple]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = X(1, 'x')
+a, b = x
+a, b, c = x  # E: Need more than 2 values to unpack (3 expected)
+
+
+[case testNamedTupleWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+n = N(1, 'x')
+s = n.a # type: str  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i = n.b # type: int  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testNamedTupleWithTupleFieldNamesWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', (('a', int),
+                     ('b', str)))
+n = N(1, 'x')
+s = n.a # type: str  # E: Incompatible types in assignment (expression has type "int", \
+                          variable has type "str")
+i = n.b # type: int  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+x, y = n
+x = y  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testNamedTupleConstructorArgumentTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "int"
+n = N(1, b=2)   # E: Argument 2 to "N" has incompatible type "int"; expected "str"
+N(1, 'x')
+N(b='x', a=1)
+
+[case testNamedTupleAsBaseClass]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+class X(N):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testNamedTupleAsBaseClass2]
+from typing import NamedTuple
+class X(NamedTuple('N', [('a', int),
+                         ('b', str)])):
+    pass
+x = X(1, 2)  # E: Argument 2 to "X" has incompatible type "int"; expected "str"
+s = ''
+i = 0
+s = x.a  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i, s = x
+s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+
+[case testNamedTuplesTwoAsBaseClasses]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+B = NamedTuple('B', [('a', int)])
+class X(A, B):  # E: Class has two incompatible bases derived from tuple
+    pass
+
+
+[case testNamedTuplesTwoAsBaseClasses2]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple
+    pass
+
+
+[case testNamedTupleSelfTypeWithNamedTupleAsBase]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A):
+    def f(self, x: int) -> None:
+        self.f(self.a)
+        self.f(self.b)  # E: Argument 1 to "f" of "B" has incompatible type "str"; expected "int"
+        i = 0
+        s = ''
+        i, s = self
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+
+[out]
+
+[case testNamedTupleTypeReferenceToClassDerivedFrom]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A):
+    def f(self, x: 'B') -> None:
+        i = 0
+        s = ''
+        self = x
+        i, s = x
+        i, s = x.a, x.b
+        i, s = x.a, x.a  # E: Incompatible types in assignment (expression has type "int", \
+                              variable has type "str")
+        i, i = self  # E: Incompatible types in assignment (expression has type "str", \
+                          variable has type "int")
+
+[out]
+
+[case testNamedTupleSubtyping]
+from typing import NamedTuple, Tuple
+A = NamedTuple('A', [('a', int), ('b', str)])
+class B(A): pass
+a = A(1, '')
+b = B(1, '')
+t = None  # type: Tuple[int, str]
+b = a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "A")
+b = t  # E: Incompatible types in assignment (expression has type "Tuple[int, str]", variable has type "B")
+t = a
+t = (1, '')
+t = b
+a = b
+
+
+[case testNamedTupleSimpleTypeInference]
+from typing import NamedTuple, Tuple
+A = NamedTuple('A', [('a', int)])
+l = [A(1), A(2)]
+a = A(1)
+a = l[0]
+(i,) = l[0]
+i, i = l[0]  # E: Need more than 1 value to unpack (2 expected)
+l = [A(1)]
+a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]", \
+               variable has type "A")
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleMissingClassAttribute]
+import collections
+MyNamedTuple = collections.namedtuple('MyNamedTuple', ['spam', 'eggs'])
+MyNamedTuple.x # E: "MyNamedTuple" has no attribute "x"
+
+
+[case testNamedTupleEmptyItems]
+from typing import NamedTuple
+A = NamedTuple('A', [])
+
+
+[case testNamedTupleProperty]
+from typing import NamedTuple
+A = NamedTuple('A', [('a', int)])
+class B(A):
+    @property
+    def b(self) -> int:
+        return self.a
+class C(B): pass
+B(1).b
+C(2).b
+
+[builtins fixtures/property.pyi]
+
+[case testNamedTupleAsDict]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+reveal_type(x._asdict())  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNamedTupleReplace]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[Any, Any, fallback=__main__.X]'
+x._replace(y=5)
+x._replace(x=3)
+x._replace(x=3, y=5)
+x._replace(z=5)  # E: Unexpected keyword argument "z" for X._replace
+x._replace(5)  # E: Too many positional arguments for X._replace
+
+[case testNamedTupleReplaceAsClass]
+from collections import namedtuple
+
+X = namedtuple('X', ['x', 'y'])
+x = None  # type: X
+X._replace(x, x=1, y=2)
+X._replace(x=1, y=2)  # E: Missing positional argument "self" in call to X._replace
+
+
+[case testNamedTupleReplaceTyped]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+x = None  # type: X
+reveal_type(x._replace())  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+x._replace(x=5)
+x._replace(y=5)  # E: Argument 1 to X._replace has incompatible type "int"; expected "str"
+
+[case testNamedTupleMake]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+X._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+
+-- # FIX: not a proper class method
+-- x = None  # type: X
+-- reveal_type(x._make([5, 'a']))  # E: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]'
+-- x._make('a b')  # E: Argument 1 to X._make has incompatible type "str"; expected Iterable[Any]
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleFields]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._fields)  # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+
+[case testNamedTupleSource]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._source)  # E: Revealed type is 'builtins.str'
+x = None  # type: X
+reveal_type(x._source)  # E: Revealed type is 'builtins.str'
+
+[case testNamedTupleUnit]
+from typing import NamedTuple
+
+X = NamedTuple('X', [])
+x = X()  # type: X
+x._replace()
+x._fields[0]  # E: Tuple index out of range
+
+[case testNamedTupleJoinNamedTuple]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+Y = NamedTuple('Y', [('x', int), ('y', str)])
+reveal_type([X(3, 'b'), Y(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleJoinTuple]
+from typing import NamedTuple, Tuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type([(3, 'b'), X(1, 'a')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+reveal_type([X(1, 'a'), (3, 'b')])  # E: Revealed type is 'builtins.list[Tuple[builtins.int, builtins.str]]'
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleFieldTypes]
+from typing import NamedTuple
+
+X = NamedTuple('X', [('x', int), ('y', str)])
+reveal_type(X._field_types)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+x = None  # type: X
+reveal_type(x._field_types)  # E: Revealed type is 'builtins.dict[builtins.str, Any]'
+
+[builtins fixtures/dict.pyi]
+
+[case testNamedTupleAndOtherSuperclass]
+from typing import NamedTuple
+
+class A: pass
+def f(x: A) -> None: pass
+
+class B(NamedTuple('B', []), A): pass
+f(B())
+x = None  # type: A
+x = B()
+
+# Sanity check: fail if baseclass does not match
+class C: pass
+def g(x: C) -> None: pass
+class D(NamedTuple('D', []), A): pass
+
+g(D())  # E: Argument 1 to "g" has incompatible type "D"; expected "C"
+y = None  # type: C
+y = D()  # E: Incompatible types in assignment (expression has type "D", variable has type "C")
+
+[case testNamedTupleSelfTypeMethod]
+from typing import TypeVar, NamedTuple
+
+T = TypeVar('T', bound='A')
+
+class A(NamedTuple('A', [('x', str)])):
+    def member(self: T) -> T:
+        return self
+
+class B(A):
+    pass
+
+a = None  # type: A
+a = A('').member()
+b = None  # type: B
+b = B('').member()
+a = B('')
+a = B('').member()
+
+[case testNamedTupleSelfTypeReplace]
+from typing import NamedTuple, TypeVar
+A = NamedTuple('A', [('x', str)])
+reveal_type(A('hello')._replace(x=''))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
+a = None  # type: A
+a = A('hello')._replace(x='')
+
+class B(A):
+    pass
+
+reveal_type(B('hello')._replace(x=''))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
+b = None  # type: B
+b = B('hello')._replace(x='')
+
+[case testNamedTupleSelfTypeMake]
+from typing import NamedTuple, TypeVar
+A = NamedTuple('A', [('x', str)])
+reveal_type(A._make(['']))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.A]'
+a = A._make([''])  # type: A
+
+class B(A):
+    pass
+
+reveal_type(B._make(['']))  # E: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]'
+b = B._make([''])  # type: B
+
+[builtins fixtures/list.pyi]
+
+[case testNamedTupleInClassNamespace]
+# https://github.com/python/mypy/pull/2553#issuecomment-266474341
+from typing import NamedTuple
+class C:
+    def f(self):
+        A = NamedTuple('A', [('x', int)])
+    def g(self):
+        A = NamedTuple('A', [('y', int)])
+C.A  # E: "C" has no attribute "A"
+
+[case testNamedTupleInFunction]
+from typing import NamedTuple
+def f() -> None:
+    A = NamedTuple('A', [('x', int)])
+A  # E: Name 'A' is not defined
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
new file mode 100644
index 0000000..bfbaade
--- /dev/null
+++ b/test-data/unit/check-newsyntax.test
@@ -0,0 +1,100 @@
+[case testNewSyntaxRequire36]
+# flags: --fast-parser --python-version 3.5
+x: int = 5  # E: Variable annotation syntax is only supported in Python 3.6, use type comment instead
+[out]
+
+[case testNewSyntaxSyntaxError]
+# flags: --fast-parser --python-version 3.6
+x: int: int  # E: invalid syntax
+[out]
+
+[case testNewSyntaxBasics]
+# flags: --fast-parser --python-version 3.6
+x: int
+x = 5
+y: int = 5
+
+a: str
+a = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b: str = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+zzz: int
+zzz: str  # E: Name 'zzz' already defined
+[out]
+
+[case testNewSyntaxWithDict]
+# flags: --fast-parser --python-version 3.6
+from typing import Dict, Any
+
+d: Dict[int, str] = {}
+d[42] = 'ab'
+d[42] = 42  # E: Incompatible types in assignment (expression has type "int", target has type "str")
+d['ab'] = 'ab'  # E: Invalid index type "str" for "dict"; expected type "int"
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNewSyntaxWithRevealType]
+# flags: --fast-parser --python-version 3.6
+from typing import Dict
+
+def tst_local(dct: Dict[int, T]) -> Dict[T, int]:
+    ret: Dict[T, int] = {}
+    return ret
+
+reveal_type(tst_local({1: 'a'}))  # E: Revealed type is 'builtins.dict[builtins.str*, builtins.int]'
+[builtins fixtures/dict.pyi]
+[out]
+
+[case testNewSyntaxWithInstanceVars]
+# flags: --fast-parser --python-version 3.6
+class TstInstance:
+    a: str
+    def __init__(self) -> None:
+        self.x: int
+
+TstInstance().x = 5
+TstInstance().x = 'ab'  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+TstInstance().a = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+TstInstance().a = 'ab'
+[out]
+
+[case testNewSyntaxWithClassVars]
+# flags: --fast-parser --strict-optional --python-version 3.6
+class CCC:
+    a: str = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+[out]
+
+[case testNewSyntaxWithStrictOptional]
+# flags: --fast-parser --strict-optional --python-version 3.6
+strict: int
+strict = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+strict2: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxWithStrictOptionalFunctions]
+# flags: --fast-parser --strict-optional --python-version 3.6
+def f() -> None:
+    x: int
+    x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxWithStrictOptionalClasses]
+# flags: --fast-parser --strict-optional --python-version 3.6
+class C:
+    def meth(self) -> None:
+        x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        self.x: int = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testNewSyntaxSpecialAssign]
+# flags: --fast-parser --python-version 3.6
+class X:
+    x: str
+    x[0]: int
+    x.x: int
+
+[out]
+main:4: error: Unexpected type declaration
+main:4: error: Unsupported target for indexed assignment
+main:5: error: Type cannot be declared in assignment to non-self attribute
+main:5: error: "str" has no attribute "x"
diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test
new file mode 100644
index 0000000..7ebe696
--- /dev/null
+++ b/test-data/unit/check-newtype.test
@@ -0,0 +1,324 @@
+-- Checks NewType(...)
+
+-- Checks for basic functionality
+
+[case testNewTypePEP484Example1]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+UserId('user')  # E: Argument 1 to "UserId" has incompatible type "str"; expected "int"
+name_by_id(42)  # E: Argument 1 to "name_by_id" has incompatible type "int"; expected "UserId"
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+reveal_type(id)  # E: Revealed type is '__main__.UserId'
+reveal_type(num)  # E: Revealed type is 'builtins.int'
+[out]
+
+[case testNewTypePEP484Example2]
+from typing import NewType
+
+class PacketId:
+    def __init__(self, major: int, minor: int) -> None:
+        self._major = major
+        self._minor = minor
+
+TcpPacketId = NewType('TcpPacketId', PacketId)
+
+packet = PacketId(100, 100)
+tcp_packet = TcpPacketId(packet)
+tcp_packet = TcpPacketId(127, 0)
+
+[out]
+main:12: error: Too many arguments for "TcpPacketId"
+main:12: error: Argument 1 to "TcpPacketId" has incompatible type "int"; expected "PacketId"
+
+[case testNewTypeWithTuples]
+from typing import NewType, Tuple
+TwoTuple = NewType('TwoTuple', Tuple[int, str])
+a = TwoTuple((3, "a"))
+b = TwoTuple(("a", 3))  # E: Argument 1 to "TwoTuple" has incompatible type "Tuple[str, int]"; expected "Tuple[int, str]"
+
+reveal_type(a[0])  # E: Revealed type is 'builtins.int'
+reveal_type(a[1])  # E: Revealed type is 'builtins.str'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testNewTypeWithLists]
+from typing import NewType, List
+UserId = NewType('UserId', int)
+IdList = NewType('IdList', List[UserId])
+
+bad1 = IdList([1])  # E: List item 0 has incompatible type "int"
+
+foo = IdList([])
+foo.append(3)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "UserId"
+foo.append(UserId(3))
+foo.extend([UserId(1), UserId(2), UserId(3)])
+foo.extend(IdList([UserId(1), UserId(2), UserId(3)]))
+bar = IdList([UserId(2)])
+
+baz = foo + bar
+reveal_type(foo)  # E: Revealed type is '__main__.IdList'
+reveal_type(bar)  # E: Revealed type is '__main__.IdList'
+reveal_type(baz)  # E: Revealed type is 'builtins.list[__main__.UserId*]'
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNewTypeWithGenerics]
+from typing import TypeVar, Generic, NewType, Any
+
+T = TypeVar('T')
+
+class Base(Generic[T]):
+    def __init__(self, item: T) -> None:
+        self.item = item
+
+    def getter(self) -> T:
+        return self.item
+
+Derived1 = NewType('Derived1', Base[str])
+Derived2 = NewType('Derived2', Base)       # Implicit 'Any'
+Derived3 = NewType('Derived3', Base[Any])  # Explicit 'Any'
+
+Derived1(Base(1))  # E: Argument 1 to "Base" has incompatible type "int"; expected "str"
+Derived1(Base('a'))
+Derived2(Base(1))
+Derived2(Base('a'))
+Derived3(Base(1))
+Derived3(Base('a'))
+
+reveal_type(Derived1(Base('a')).getter())  # E: Revealed type is 'builtins.str*'
+reveal_type(Derived3(Base('a')).getter())  # E: Revealed type is 'Any'
+[out]
+
+[case testNewTypeWithNamedTuple]
+from collections import namedtuple
+from typing import NewType, NamedTuple
+
+Vector1 = namedtuple('Vector1', ['x', 'y'])
+Point1 = NewType('Point1', Vector1)
+p1 = Point1(Vector1(1, 2))
+reveal_type(p1.x)  # E: Revealed type is 'Any'
+reveal_type(p1.y)  # E: Revealed type is 'Any'
+
+Vector2 = NamedTuple('Vector2', [('x', int), ('y', int)])
+Point2 = NewType('Point2', Vector2)
+p2 = Point2(Vector2(1, 2))
+reveal_type(p2.x)  # E: Revealed type is 'builtins.int'
+reveal_type(p2.y)  # E: Revealed type is 'builtins.int'
+
+class Vector3:
+    def __init__(self, x: int, y: int) -> None:
+        self.x = x
+        self.y = y
+Point3 = NewType('Point3', Vector3)
+p3 = Point3(Vector3(1, 3))
+reveal_type(p3.x)  # E: Revealed type is 'builtins.int'
+reveal_type(p3.y)  # E: Revealed type is 'builtins.int'
+[out]
+
+[case testNewTypeWithCasts]
+from typing import NewType, cast
+UserId = NewType('UserId', int)
+foo = UserId(3)
+foo = cast(UserId, 3)
+foo = cast(UserId, "foo")
+foo = cast(UserId, UserId(4))
+[out]
+
+[case testNewTypeWithTypeAliases]
+from typing import NewType
+Foo = int
+Bar = NewType('Bar', Foo)
+Bar2 = Bar
+
+def func1(x: Foo) -> Bar:
+    return Bar(x)
+
+def func2(x: int) -> Bar:
+    return Bar(x)
+
+def func3(x: Bar2) -> Bar:
+    return x
+
+x = Bar(42)
+y = Bar2(42)
+
+y = func3(x)
+[out]
+
+
+-- Make sure NewType works as expected in a variety of different scopes/across files
+
+[case testNewTypeInLocalScope]
+from typing import NewType
+A = NewType('A', int)
+a = A(3)
+
+def func() -> None:
+    A = NewType('A', str)
+    B = NewType('B', str)
+
+    a = A(3)  # E: Argument 1 to "A" has incompatible type "int"; expected "str"
+    a = A('xyz')
+    b = B('xyz')
+
+class MyClass:
+    C = NewType('C', float)
+
+    def foo(self) -> 'MyClass.C':
+        return MyClass.C(3.2)
+
+b = A(3)
+c = MyClass.C(3.5)
+[out]
+
+[case testNewTypeInMultipleFiles]
+import a
+import b
+list1 = [a.UserId(1), a.UserId(2)]
+list1.append(b.UserId(3))  # E: Argument 1 to "append" of "list" has incompatible type "b.UserId"; expected "a.UserId"
+
+[file a.py]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+[file b.py]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+[builtins fixtures/list.pyi]
+[out]
+
+[case testNewTypeWithIncremental]
+import m
+
+[file m.py]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+[file m.py.next]
+from typing import NewType
+
+UserId = NewType('UserId', int)
+
+def name_by_id(user_id: UserId) -> str:
+    return "foo"
+
+name_by_id(UserId(42))
+
+id = UserId(5)
+num = id + 1
+
+reveal_type(id)
+reveal_type(num)
+[rechecked m]
+[stale]
+[out1]
+[out2]
+tmp/m.py:13: error: Revealed type is 'm.UserId'
+tmp/m.py:14: error: Revealed type is 'builtins.int'
+
+
+-- Check misuses of NewType fail
+
+[case testNewTypeBadInitializationFails]
+from typing import NewType
+
+a = NewType('b', int)  # E: String argument 1 'b' to NewType(...) does not match variable name 'a'
+b = NewType('b', 3)  # E: Argument 2 to NewType(...) must be a valid type
+c = NewType(2, int)  # E: Argument 1 to NewType(...) must be a string literal
+foo = "d"
+d = NewType(foo, int)  # E: Argument 1 to NewType(...) must be a string literal
+e = NewType(name='e', tp=int)  # E: NewType(...) expects exactly two positional arguments
+f = NewType('f', tp=int)  # E: NewType(...) expects exactly two positional arguments
+[out]
+
+[case testNewTypeWithAnyFails]
+from typing import NewType, Any
+A = NewType('A', Any)  # E: Argument 2 to NewType(...) must be subclassable (got Any)
+[out]
+
+[case testNewTypeWithUnionsFails]
+from typing import NewType, Union
+Foo = NewType('Foo', Union[int, float])  # E: Argument 2 to NewType(...) must be subclassable (got Union[builtins.int, builtins.float])
+[out]
+
+[case testNewTypeWithTypeTypeFails]
+from typing import NewType, Type
+Foo = NewType('Foo', Type[int])  # E: Argument 2 to NewType(...) must be subclassable (got Type[builtins.int])
+a = Foo(type(3))
+[builtins fixtures/args.pyi]
+[out]
+
+[case testNewTypeWithTypeVarsFails]
+from typing import NewType, TypeVar, List
+T = TypeVar('T')
+A = NewType('A', T)
+B = NewType('B', List[T])
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Argument 2 to NewType(...) must be subclassable (got T?)
+main:3: error: Invalid type "__main__.T"
+main:4: error: Invalid type "__main__.T"
+
+[case testNewTypeWithNewTypeFails]
+from typing import NewType
+A = NewType('A', int)
+B = NewType('B', A)  # E: Argument 2 to NewType(...) cannot be another NewType
+C = A
+D = C
+E = NewType('E', D)  # E: Argument 2 to NewType(...) cannot be another NewType
+[out]
+
+[case testNewTypeRedefiningVariablesFails]
+from typing import NewType
+
+a = 3
+a = NewType('a', int)
+
+b = NewType('b', int)
+b = NewType('b', float)  # this line throws two errors
+
+c = NewType('c', str)   # type: str
+[out]
+main:4: error: Cannot redefine 'a' as a NewType
+main:7: error: Invalid assignment target
+main:7: error: Cannot redefine 'b' as a NewType
+main:9: error: Cannot declare the type of a NewType declaration
+
+[case testNewTypeAddingExplicitTypesFails]
+from typing import NewType
+UserId = NewType('UserId', int)
+
+a = 3  # type: UserId  # E: Incompatible types in assignment (expression has type "int", variable has type "UserId")
+[out]
+
+[case testNewTypeTestSubclassingFails]
+from typing import NewType
+class A: pass
+B = NewType('B', A)
+class C(B): pass  # E: Cannot subclass NewType
+[out]
+
+[case testNewTypeAny]
+from typing import NewType
+Any = NewType('Any', int)
+Any(5)
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
new file mode 100644
index 0000000..09fb4b5
--- /dev/null
+++ b/test-data/unit/check-optional.test
@@ -0,0 +1,546 @@
+-- Tests for strict Optional behavior
+
+[case testImplicitNoneType]
+x = None
+x()  # E: None not callable
+
+[case testExplicitNoneType]
+x = None  # type: None
+x()  # E: None not callable
+
+[case testNoneMemberOfOptional]
+from typing import Optional
+x = None  # type: Optional[int]
+
+[case testTypeMemberOfOptional]
+from typing import Optional
+x = 0  # type: Optional[int]
+
+[case testNoneNotMemberOfType]
+x = None  # type: int
+[out]
+main:1: error: Incompatible types in assignment (expression has type None, variable has type "int")
+
+[case testTypeNotMemberOfNone]
+x = 0  # type: None
+[out]
+main:1: error: Incompatible types in assignment (expression has type "int", variable has type None)
+
+[case testOptionalNotMemberOfType]
+from typing import Optional
+def f(a: int) -> None: pass
+x = None  # type:  Optional[int]
+f(x)  # E: Argument 1 to "f" has incompatible type "Optional[int]"; expected "int"
+
+[case testIsinstanceCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if isinstance(x, int):
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+[builtins fixtures/isinstance.pyi]
+
+[case testIfCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testIfNotCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if not x:
+  reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/bool.pyi]
+
+[case testIsNotNoneCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x is not None:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+[builtins fixtures/bool.pyi]
+
+[case testIsNoneCases]
+from typing import Optional
+x = None  # type:  Optional[int]
+if x is None:
+  reveal_type(x)  # E: Revealed type is 'builtins.None'
+else:
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/bool.pyi]
+
+[case testOrCases]
+from typing import Optional
+x = None  # type: Optional[str]
+y1 = x or 'a'
+reveal_type(y1)  # E: Revealed type is 'builtins.str'
+y2 = x or 1
+reveal_type(y2)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+z1 = 'a' or x
+reveal_type(z1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+z2 = int() or x
+reveal_type(z2)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
+
+[case testAndCases]
+from typing import Optional
+x = None  # type: Optional[str]
+y1 = x and 'b'
+reveal_type(y1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+y2 = x and 1  # x could be '', so...
+reveal_type(y2)  # E: Revealed type is 'Union[builtins.str, builtins.None, builtins.int]'
+z1 = 'b' and x
+reveal_type(z1)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+z2 = int() and x
+reveal_type(z2)  # E: Revealed type is 'Union[builtins.int, builtins.str, builtins.None]'
+
+[case testLambdaReturningNone]
+f = lambda: None
+x = f()  # E: Function does not return a value
+
+[case testNoneArgumentType]
+def f(x: None) -> None: pass
+f(None)
+
+[case testInferOptionalFromDefaultNone]
+def f(x: int = None) -> None:
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testInferOptionalFromDefaultNoneWithFastParser]
+# flags: --fast-parser
+def f(x: int = None) -> None:
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testInferOptionalFromDefaultNoneComment]
+def f(x=None):
+  # type: (int) -> None
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testInferOptionalFromDefaultNoneCommentWithFastParser]
+# flags: --fast-parser
+def f(x=None):
+  # type: (int) -> None
+  x + 1  # E: Unsupported left operand type for + (some union)
+f(None)
+[out]
+
+[case testInferOptionalType]
+x = None
+if bool():
+  # scope limit assignment
+  x = 1
+  # in scope of the assignment, x is an int
+  reveal_type(x)  # E: Revealed type is 'builtins.int'
+# out of scope of the assignment, it's an Optional[int]
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testInferOptionalTypeLocallyBound]
+x = None
+x = 1
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+
+
+[case testInferOptionalTypeFromOptional]
+from typing import Optional
+y = None  # type: Optional[int]
+x = None
+x = y
+reveal_type(x)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+
+[case testInferOptionalListType]
+x = [None]
+x.append(1)  # E: Argument 1 to "append" of "list" has incompatible type "int"; expected None
+[builtins fixtures/list.pyi]
+
+[case testInferNonOptionalListType]
+x = []
+x.append(1)
+x()  # E: List[int] not callable
+[builtins fixtures/list.pyi]
+
+[case testInferOptionalDictKeyValueTypes]
+x = {None: None}
+x["bar"] = 1
+[builtins fixtures/dict.pyi]
+[out]
+main:2: error: Invalid index type "str" for "dict"; expected type None
+main:2: error: Incompatible types in assignment (expression has type "int", target has type None)
+
+[case testInferNonOptionalDictType]
+x = {}
+x["bar"] = 1
+x()  # E: Dict[str, int] not callable
+[builtins fixtures/dict.pyi]
+
+[case testNoneClassVariable]
+from typing import Optional
+class C:
+    x = None  # type: int
+    def __init__(self) -> None:
+        self.x = 0
+
+[case testNoneClassVariableInInit]
+from typing import Optional
+class C:
+    x = None  # type: int
+    def __init__(self) -> None:
+        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+[out]
+
+[case testMultipleAssignmentNoneClassVariableInInit]
+from typing import Optional
+class C:
+    x, y = None, None  # type: int, str
+    def __init__(self) -> None:
+        self.x = None  # E: Incompatible types in assignment (expression has type None, variable has type "int")
+        self.y = None  # E: Incompatible types in assignment (expression has type None, variable has type "str")
+[out]
+
+[case testOverloadWithNone]
+from typing import overload
+ at overload
+def f(x: None) -> str: pass
+ at overload
+def f(x: int) -> int: pass
+reveal_type(f(None))  # E: Revealed type is 'builtins.str'
+reveal_type(f(0))  # E: Revealed type is 'builtins.int'
+
+[case testOptionalTypeOrTypePlain]
+from typing import Optional
+def f(a: Optional[int]) -> int:
+    return a or 0
+[out]
+
+[case testOptionalTypeOrTypeTypeVar]
+from typing import Optional, TypeVar
+T = TypeVar('T')
+def f(a: Optional[T], b: T) -> T:
+    return a or b
+[out]
+
+[case testOptionalTypeOrTypeBothOptional]
+from typing import Optional
+def f(a: Optional[int], b: Optional[int]) -> None:
+    reveal_type(a or b)
+def g(a: int, b: Optional[int]) -> None:
+    reveal_type(a or b)
+[out]
+main:3: error: Revealed type is 'Union[builtins.int, builtins.None]'
+main:5: error: Revealed type is 'Union[builtins.int, builtins.None]'
+
+[case testOptionalTypeOrTypeComplexUnion]
+from typing import Union
+def f(a: Union[int, str, None]) -> None:
+    reveal_type(a or 'default')
+[out]
+main:3: error: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testOptionalTypeOrTypeNoTriggerPlain]
+from typing import Optional
+def f(a: Optional[int], b: int) -> int:
+    return b or a
+[out]
+main:3: error: Incompatible return value type (got "Optional[int]", expected "int")
+
+[case testOptionalTypeOrTypeNoTriggerTypeVar]
+from typing import Optional, TypeVar
+T = TypeVar('T')
+def f(a: Optional[T], b: T) -> T:
+    return b or a
+[out]
+main:4: error: Incompatible return value type (got "Optional[T]", expected "T")
+
+[case testNoneOrStringIsString]
+def f() -> str:
+    a = None
+    b = ''
+    return a or b
+[out]
+
+[case testNoneOrTypeVarIsTypeVar]
+from typing import TypeVar
+T = TypeVar('T')
+def f(b: T) -> T:
+    a = None
+    return a or b
+[out]
+
+[case testYieldNothingInFunctionReturningGenerator]
+from typing import Generator
+def f() -> Generator[None, None, None]:
+    yield
+[out]
+
+[case testNoneAndStringIsNone]
+a = None
+b = "foo"
+reveal_type(a and b)  # E: Revealed type is 'builtins.None'
+
+[case testNoneMatchesObjectInOverload]
+import a
+a.f(None)
+
+[file a.pyi]
+from typing import overload
+ at overload
+def f() -> None: ...
+ at overload
+def f(o: object) -> None: ...
+
+[case testGenericSubclassReturningNone]
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+class Base(Generic[T]):
+  def f(self) -> T:
+    pass
+
+class SubNone(Base[None]):
+  def f(self) -> None:
+    pass
+
+class SubInt(Base[int]):
+  def f(self) -> int:
+    return 1
+
+[case testUseOfNoneReturningFunction]
+from typing import Optional
+def f() -> None:
+    pass
+
+def g(x: Optional[int]) -> int:
+  pass
+
+x = f()  # E: Function does not return a value
+f() + 1  # E: Function does not return a value
+g(f())  # E: Function does not return a value
+
+[case testEmptyReturn]
+def f() -> None:
+    return
+
+[case testReturnNone]
+def f() -> None:
+    return None
+
+[case testNoneCallable]
+from typing import Callable
+def f() -> None: pass
+x = f  # type: Callable[[], None]
+
+[case testOptionalCallable]
+from typing import Callable, Optional
+T = Optional[Callable[..., None]]
+
+[case testAnyTypeInPartialTypeList]
+# flags: --check-untyped-defs
+def f(): ...
+
+def lookup_field(name, obj):
+    try:
+        pass
+    except:
+        attr = f()
+    else:
+        attr = None
+
+[case testTernaryWithNone]
+reveal_type(None if bool() else 0)  # E: Revealed type is 'Union[builtins.int, builtins.None]'
+[builtins fixtures/bool.pyi]
+
+[case testListWithNone]
+reveal_type([0, None, 0])    # E: Revealed type is 'builtins.list[Union[builtins.int, builtins.None]]'
+[builtins fixtures/list.pyi]
+
+[case testOptionalWhitelistSuppressesOptionalErrors]
+# flags: --strict-optional-whitelist
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+
+[case testOptionalWhitelistPermitsOtherErrors]
+# flags: --strict-optional-whitelist
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+1 + "foo"
+[out]
+tmp/b.py:4: error: Unsupported operand types for + ("int" and "str")
+
+[case testOptionalWhitelistPermitsWhitelistedFiles]
+# flags: --strict-optional-whitelist **/a.py
+import a
+import b
+[file a.py]
+from typing import Optional
+x = None  # type: Optional[str]
+x + "foo"
+
+[file b.py]
+from typing import Optional
+x = None  # type: Optional[int]
+x + 1
+[out]
+tmp/a.py:3: error: Unsupported left operand type for + (some union)
+
+[case testNoneContextInference]
+from typing import Dict, List
+def f() -> List[None]:
+    return []
+def g() -> Dict[None, None]:
+    return {}
+[builtins fixtures/dict.pyi]
+
+[case testRaiseFromNone]
+raise BaseException from None
+[builtins fixtures/exception.pyi]
+
+[case testOptionalNonPartialTypeWithNone]
+from typing import Generator
+def f() -> Generator[str, None, None]: pass
+x = f()
+reveal_type(x)  # E: Revealed type is 'typing.Generator[builtins.str, builtins.None, builtins.None]'
+l = [f()]
+reveal_type(l)  # E: Revealed type is 'builtins.list[typing.Generator*[builtins.str, builtins.None, builtins.None]]'
+[builtins fixtures/list.pyi]
+
+[case testNoneListTernary]
+x = [None] if "" else [1]  # E: List item 0 has incompatible type "int"
+[builtins fixtures/list.pyi]
+
+[case testInferEqualsNotOptional]
+from typing import Optional
+x = ''  # type: Optional[str]
+if x == '<string>':
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsNotOptionalWithUnion]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+if x == '<string>':
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsNotOptionalWithOverlap]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+if x == object():
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsStillOptionalWithNoOverlap]
+from typing import Optional
+x = ''  # type: Optional[str]
+if x == 0:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testInferEqualsStillOptionalWithBothOptional]
+from typing import Union
+x = ''  # type: Union[str, int, None]
+y = ''  # type: Union[str, None]
+if x == y:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+else:
+    reveal_type(x)  # E: Revealed type is 'Union[builtins.str, builtins.int, builtins.None]'
+[builtins fixtures/ops.pyi]
+
+[case testWarnNoReturnWorksWithStrictOptional]
+# flags: --warn-no-return
+def f() -> None:
+  1 + 1  # no error
+
+def g() -> int:
+  1 + 1  #
+[out]
+main:5: note: Missing return statement
+
+[case testGenericTypeAliasesOptional]
+from typing import TypeVar, Generic, Optional
+T = TypeVar('T')
+class Node(Generic[T]):
+    def __init__(self, x: T) -> None:
+        self.x = x
+
+ONode = Optional[Node[T]]
+def f(x: T) -> ONode[T]:
+    if 1 > 0:
+        return Node(x)
+    else:
+        return None
+
+x = None # type: ONode[int]
+x = f(1)
+x = f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+x.x = 1 # E: Some element of union has no attribute "x"
+if x is not None:
+    x.x = 1 # OK here
+
+[builtins fixtures/ops.pyi]
+
+[case testOptionalLambdaInference]
+from typing import Optional, Callable
+f = None # type: Optional[Callable[[int], None]]
+f = lambda x: None
+f(0)
+[builtins fixtures/function.pyi]
+
+[case testUnionSimplificationWithStrictOptional]
+from typing import Any, TypeVar, Union
+class C(Any): pass
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+a = None # type: Any
+
+# Test both orders
+reveal_type(u(C(), None))  # E: Revealed type is 'Union[builtins.None, __main__.C*]'
+reveal_type(u(None, C()))  # E: Revealed type is 'Union[__main__.C*, builtins.None]'
+
+# This will be fixed later
+reveal_type(u(a, None))  # E: Revealed type is 'Any'
+reveal_type(u(None, a))  # E: Revealed type is 'Any'
+
+reveal_type(u(1, None))  # E: Revealed type is 'Union[builtins.None, builtins.int*]'
+reveal_type(u(None, 1))  # E: Revealed type is 'Union[builtins.int*, builtins.None]'
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
new file mode 100644
index 0000000..e173fb2
--- /dev/null
+++ b/test-data/unit/check-overloading.test
@@ -0,0 +1,759 @@
+-- Test cases for function overloading
+
+
+[case testTypeCheckOverloadedFunctionBody]
+from typing import overload
+ at overload
+def f(x: 'A'):
+    x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    x = A()
+ at overload
+def f(x: 'B'):
+    x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+    x = B()
+class A: pass
+class B: pass
+[out]
+
+[case testTypeCheckOverloadedMethodBody]
+from typing import overload
+class A:
+    @overload
+    def f(self, x: 'A'):
+        x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+        x = A()
+    @overload
+    def f(self, x: 'B'):
+        x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+        x = B()
+class B: pass
+[out]
+
+[case testCallToOverloadedFunction]
+from typing import overload
+f(C()) # E: No overload variant of "f" matches argument types [__main__.C]
+f(A())
+f(B())
+
+ at overload
+def f(x: 'A') -> None: pass
+ at overload
+def f(x: 'B') -> None: pass
+
+class A: pass
+class B: pass
+class C: pass
+
+[case testOverloadedFunctionReturnValue]
+from typing import overload
+a, b = None, None # type: (A, B)
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(a)
+b = f(b)
+
+ at overload
+def f(x: 'A') -> 'A': pass
+ at overload
+def f(x: 'B') -> 'B': pass
+class A: pass
+class B: pass
+
+[case testCallToOverloadedMethod]
+from typing import overload
+A().f(C()) # E: No overload variant of "f" of "A" matches argument types [__main__.C]
+A().f(A())
+A().f(B())
+
+class A:
+  @overload
+  def f(self, x: 'A') -> None: pass
+  @overload
+  def f(self, x: 'B') -> None: pass
+
+class B: pass
+class C: pass
+
+[case testOverloadedMethodReturnValue]
+from typing import overload
+a, b = None, None # type: (A, B)
+b = a.f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = a.f(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = a.f(a)
+b = a.f(b)
+
+class A:
+  @overload
+  def f(self, x: 'A') -> 'A': pass
+  @overload
+  def f(self, x: 'B') -> 'B': pass
+class B: pass
+
+[case testOverloadsWithDifferentArgumentCounts]
+from typing import overload
+a, b = None, None # type: (A, B)
+a = f(a)
+b = f(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+f(b)     # E: No overload variant of "f" matches argument types [__main__.B]
+b = f(b, a)
+a = f(b, a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+f(a, a)     # E: No overload variant of "f" matches argument types [__main__.A, __main__.A]
+f(b, b)     # E: No overload variant of "f" matches argument types [__main__.B, __main__.B]
+
+ at overload
+def f(x: 'A') -> 'A': pass
+ at overload
+def f(x: 'B', y: 'A') -> 'B': pass
+class A: pass
+class B: pass
+
+[case testGenericOverloadVariant]
+from typing import overload, TypeVar, Generic
+t = TypeVar('t')
+ab, ac, b, c = None, None, None, None # type: (A[B], A[C], B, C)
+b = f(ab)
+c = f(ac)
+b = f(ac) # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+b = f(b)
+c = f(b)  # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+ at overload
+def f(x: 'A[t]') -> t: pass
+ at overload
+def f(x: 'B') -> 'B': pass
+class A(Generic[t]): pass
+class B: pass
+class C: pass
+
+[case testOverloadedInit]
+from typing import overload
+a, b = None, None # type: (A, B)
+a = A(a)
+a = A(b)
+a = A(object()) # E: No overload variant of "A" matches argument types [builtins.object]
+
+class A:
+  @overload
+  def __init__(self, a: 'A') -> None: pass
+  @overload
+  def __init__(self, b: 'B') -> None: pass
+class B: pass
+
+[case testIntersectionTypeCompatibility]
+from typing import overload, Callable
+o = None # type: object
+a = None # type: A
+
+a = f # E: Incompatible types in assignment (expression has type overloaded function, variable has type "A")
+o = f
+
+ at overload
+def f(a: 'A') -> None: pass
+ at overload
+def f(a: Callable[[], None]) -> None: pass
+class A: pass
+
+[case testCompatibilityOfIntersectionTypeObjectWithStdType]
+from typing import overload
+t, a = None, None # type: (type, A)
+
+a = A # E: Incompatible types in assignment (expression has type "A" (type object), variable has type "A")
+t = A
+
+class A:
+    @overload
+    def __init__(self, a: 'A') -> None: pass
+    @overload
+    def __init__(self, a: 'B') -> None: pass
+class B: pass
+
+[case testOverloadedGetitem]
+from typing import overload
+a, b = None, None # type: int, str
+a = A()[a]
+b = A()[a] # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+b = A()[b]
+a = A()[b] # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+class A:
+    @overload
+    def __getitem__(self, a: int) -> int: pass
+    @overload
+    def __getitem__(self, b: str) -> str: pass
+
+[case testOverloadedGetitemWithGenerics]
+from typing import TypeVar, Generic, overload
+t = TypeVar('t')
+a, b, c = None, None, None # type: (A, B, C[A])
+a = c[a]
+b = c[a] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = c[b]
+b = c[b] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+class C(Generic[t]):
+    @overload
+    def __getitem__(self, a: 'A') -> t: pass
+    @overload
+    def __getitem__(self, b: 'B') -> t: pass
+class A: pass
+class B: pass
+
+[case testImplementingOverloadedMethod]
+from typing import overload
+from abc import abstractmethod, ABCMeta
+
+class I(metaclass=ABCMeta):
+    @overload
+    @abstractmethod
+    def f(self) -> None: pass
+    @overload
+    @abstractmethod
+    def f(self, a: 'A') -> None: pass
+class A(I):
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, a: 'A') -> None: pass
+
+[case testOverloadWithFunctionType]
+from typing import overload, Callable
+class A: pass
+ at overload
+def f(x: A) -> None: pass
+ at overload
+def f(x: Callable[[], None]) -> None: pass
+
+f(A())
+[builtins fixtures/function.pyi]
+
+[case testVarArgsOverload]
+from typing import overload, Any
+ at overload
+def f(x: 'A', *more: Any) -> 'A': pass
+ at overload
+def f(x: 'B', *more: Any) -> 'A': pass
+f(A())
+f(A(), A, A)
+f(B())
+f(B(), B)
+f(B(), B, B)
+f(object()) # E: No overload variant of "f" matches argument types [builtins.object]
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testVarArgsOverload2]
+from typing import overload
+ at overload
+def f(x: 'A', *more: 'B') -> 'A': pass
+ at overload
+def f(x: 'B', *more: 'A') -> 'A': pass
+f(A(), B())
+f(A(), B(), B())
+f(A(), A(), B()) # E: No overload variant of "f" matches argument types [__main__.A, __main__.A, __main__.B]
+f(A(), B(), A()) # E: No overload variant of "f" matches argument types [__main__.A, __main__.B, __main__.A]
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testOverloadWithTypeObject]
+from typing import overload
+ at overload
+def f(a: 'A', t: type) -> None: pass
+ at overload
+def f(a: 'B', t: type) -> None: pass
+f(A(), B)
+f(B(), A)
+class A: pass
+class B: pass
+[builtins fixtures/function.pyi]
+
+[case testOverloadedInitAndTypeObjectInOverload]
+from typing import overload
+ at overload
+def f(t: type) -> 'A': pass
+ at overload
+def f(t: 'A') -> 'B': pass
+a, b = None, None # type: (A, B)
+a = f(A)
+b = f(a)
+b = f(A) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(a) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+class A:
+   @overload
+   def __init__(self) -> None: pass
+   @overload
+   def __init__(self, a: 'A') -> None: pass
+class B:
+    pass
+
+[case testOverlappingErasedSignatures]
+from typing import overload, List
+ at overload
+def f(a: List[int]) -> int: pass
+ at overload
+def f(a: List[str]) -> int: pass
+list_int = [] # type: List[int]
+list_str = [] # type: List[str]
+list_object = [] # type: List[object]
+n = f(list_int)
+m = f(list_str)
+n = 1
+m = 1
+n = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+m = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+f(list_object) # E: Argument 1 to "f" has incompatible type List[object]; expected List[int]
+[builtins fixtures/list.pyi]
+
+[case testOverlappingOverloadSignatures]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def f(x: B) -> int: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def f(x: A) -> str: pass
+
+[case testContravariantOverlappingOverloadSignatures]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def f(x: A) -> A: pass
+ at overload
+def f(x: B) -> B: pass # This is more specific than the first item, and thus
+                       # will never be called.
+
+[case testPartiallyCovariantOverlappingOverloadSignatures]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def f(x: B) -> A: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def f(x: A) -> B: pass
+
+[case testPartiallyContravariantOverloadSignatures]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def g(x: A) -> int: pass # Fine, since A is a supertype of B.
+ at overload
+def g(x: B) -> str: pass
+
+[case testCovariantOverlappingOverloadSignatures]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def g(x: B) -> B: pass
+ at overload
+def g(x: A) -> A: pass
+
+[case testCovariantOverlappingOverloadSignaturesWithSomeSameArgTypes]
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def g(x: int, y: B) -> B: pass
+ at overload
+def g(x: int, y: A) -> A: pass
+
+[case testCovariantOverlappingOverloadSignaturesWithAnyType]
+from typing import Any, overload
+ at overload
+def g(x: int) -> int: pass
+ at overload
+def g(x: Any) -> Any: pass
+
+[case testContravariantOverlappingOverloadSignaturesWithAnyType]
+from typing import Any, overload
+ at overload
+def g(x: Any) -> Any: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+ at overload
+def g(x: int) -> int: pass
+
+[case testOverloadedLtAndGtMethods]
+from typing import overload
+class A:
+    def __lt__(self, x: A) -> int: pass
+    def __gt__(self, x: A) -> int: pass
+class B:
+    @overload
+    def __lt__(self, x: B) -> int: pass
+    @overload
+    def __lt__(self, x: A) -> int: pass
+    @overload
+    def __gt__(self, x: B) -> int: pass
+    @overload
+    def __gt__(self, x: A) -> int: pass
+A() < A()
+A() < B()
+B() < A()
+B() < B()
+A() < object() # E: Unsupported operand types for < ("A" and "object")
+B() < object() # E: No overload variant of "__lt__" of "B" matches argument types [builtins.object]
+
+[case testOverloadedForwardMethodAndCallingReverseMethod]
+from typing import overload
+class A:
+    @overload
+    def __add__(self, x: 'A') -> int: pass
+    @overload
+    def __add__(self, x: int) -> int: pass
+class B:
+    def __radd__(self, x: A) -> int: pass
+A() + A()
+A() + 1
+A() + B()
+A() + '' # E: No overload variant of "__add__" of "A" matches argument types [builtins.str]
+
+[case testOverrideOverloadedMethodWithMoreGeneralArgumentTypes]
+from typing import overload
+
+class IntSub(int): pass
+
+class StrSub(str): pass
+class A:
+    @overload
+    def f(self, x: IntSub) -> int: return 0
+    @overload
+    def f(self, x: StrSub) -> str: return ''
+class B(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+[out]
+
+[case testOverrideOverloadedMethodWithMoreSpecificArgumentTypes]
+from typing import overload
+
+class IntSub(int): pass
+
+class StrSub(str): pass
+class A:
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+class B(A):
+    @overload
+    def f(self, x: IntSub) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+class C(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: StrSub) -> str: return ''
+class D(A):
+    @overload
+    def f(self, x: int) -> int: return 0
+    @overload
+    def f(self, x: str) -> str: return ''
+[out]
+main:12: error: Signature of "f" incompatible with supertype "A"
+main:17: error: Signature of "f" incompatible with supertype "A"
+
+[case testOverloadingAndDucktypeCompatibility]
+from typing import overload, _promote
+
+class A: pass
+
+ at _promote(A)
+class B: pass
+
+ at overload
+def f(n: B) -> B:
+    return n
+ at overload
+def f(n: A) -> A:
+    return n
+
+f(B()) + 'x'  # E: Unsupported left operand type for + ("B")
+f(A()) + 'x'  # E: Unsupported left operand type for + ("A")
+
+[case testOverloadingAndIntFloatSubtyping]
+from typing import overload
+ at overload
+def f(x: float) -> None: pass
+ at overload
+def f(x: str) -> None: pass
+f(1.1)
+f('')
+f(1)
+f(()) # E: No overload variant of "f" matches argument types [Tuple[]]
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testOverloadingVariableInputs]
+from typing import overload
+ at overload
+def f(x: int, y: int) -> None: pass
+ at overload
+def f(x: int) -> None: pass
+f(1)
+f(1, 2)
+z = (1, 2)
+f(*z)
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testTypeInferenceSpecialCaseWithOverloading]
+from typing import overload
+
+class A:
+    def __add__(self, x: A) -> A: pass
+class B:
+    def __radd__(self, x: A) -> B: pass
+
+ at overload
+def f(x: A) -> A: pass
+ at overload
+def f(x: B) -> B: pass
+
+f(A() + B())() # E: "B" not callable
+
+[case testKeywordArgOverload]
+from typing import overload
+ at overload
+def f(x: int, y: str) -> int: pass
+ at overload
+def f(x: str, y: int) -> str: pass
+f(x=1, y='')() # E: "int" not callable
+f(y=1, x='')() # E: "str" not callable
+
+[case testIgnoreOverloadVariantBasedOnKeywordArg]
+from typing import overload
+ at overload
+def f(x: int) -> int: pass
+ at overload
+def f(y: int) -> str: pass
+f(x=1)() # E: "int" not callable
+f(y=1)() # E: "str" not callable
+
+[case testOverloadWithTupleVarArg]
+from typing import overload
+ at overload
+def f(x: int, y: str) -> int: pass
+ at overload
+def f(*x: str) -> str: pass
+f(*(1,))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int]]
+f(*('',))() # E: "str" not callable
+f(*(1, ''))() # E: "int" not callable
+f(*(1, '', 1))() # E: No overload variant of "f" matches argument types [Tuple[builtins.int, builtins.str, builtins.int]]
+
+[case testPreferExactSignatureMatchInOverload]
+from typing import overload, List
+ at overload
+def f(x: int, y: List[int] = None) -> int: pass
+ at overload
+def f(x: int, y: List[str] = None) -> int: pass
+f(y=[1], x=0)() # E: "int" not callable
+f(y=[''], x=0)() # E: "int" not callable
+a = f(y=[['']], x=0) # E: List item 0 has incompatible type List[str]
+a() # E: "int" not callable
+[builtins fixtures/list.pyi]
+
+[case testOverloadWithDerivedFromAny]
+from typing import Any, overload
+Base = None  # type: Any
+
+class C:
+    @overload
+    def __init__(self, a: str) -> None: pass
+    @overload
+    def __init__(self, a: int) -> None: pass
+
+class Derived(Base):
+    def to_dict(self) -> C:
+        return C(self)  # fails without the fix for #1363
+C(Derived())  # fails without the hack
+C(Base())  # Always ok
+
+[case testOverloadWithBoundedTypeVar]
+from typing import overload, TypeVar
+T = TypeVar('T', bound=str)
+ at overload
+def f(x: T) -> T: pass
+ at overload
+def f(x: int) -> bool: pass
+class mystr(str): pass
+
+f('x')() # E: "str" not callable
+f(1)() # E: "bool" not callable
+f(1.1) # E: No overload variant of "f" matches argument types [builtins.float]
+f(mystr())() # E: "mystr" not callable
+[builtins fixtures/primitives.pyi]
+
+[case testOverloadedCallWithVariableTypes]
+from typing import overload, TypeVar, List
+T = TypeVar('T', bound=str)
+ at overload
+def f(x: T) -> T: pass
+ at overload
+def f(x: List[T]) -> None: pass
+class mystr(str): pass
+
+U = TypeVar('U', bound=mystr)
+V = TypeVar('V')
+def g(x: U, y: V) -> None:
+    f(x)() # E: "mystr" not callable
+    f(y) # E: No overload variant of "f" matches argument types [V`-2]
+    a = f([x]) # E: "f" does not return a value
+    f([y]) # E: Type argument 1 of "f" has incompatible value "V"
+    f([x, y]) # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testOverlapWithTypeVars]
+from typing import overload, TypeVar, Sequence
+T = TypeVar('T', bound=str)
+@overload
+def f(x: Sequence[T]) -> None: pass
+@overload
+def f(x: Sequence[int]) -> int: pass
+# These are considered overlapping despite the bound on T due to runtime type erasure.
+[out]
+main:4: error: Overloaded function signatures 1 and 2 overlap with incompatible return types
+
+[case testOverlapWithTypeVarsWithValues]
+from typing import overload, TypeVar
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+@overload
+def f(x: int) -> int: pass
+@overload
+def f(x: AnyStr) -> str: pass
+
+f(1)() # E: "int" not callable
+f('1')() # E: "str" not callable
+f(b'1')() # E: "str" not callable
+f(1.0) # E: No overload variant of "f" matches argument types [builtins.float]
+
+@overload
+def g(x: AnyStr, *a: AnyStr) -> None: pass
+@overload
+def g(x: int, *a: AnyStr) -> None: pass
+
+g('foo')
+g('foo', 'bar')
+g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+g(1)
+g(1, 'foo')
+g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object"
+[builtins fixtures/primitives.pyi]
+
+[case testBadOverlapWithTypeVarsWithValues]
+from typing import overload, TypeVar
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+@overload
+def f(x: AnyStr) -> None: pass # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def f(x: str) -> bool: pass
+[builtins fixtures/primitives.pyi]
+
+[case testOverlappingOverloadCounting]
+from typing import overload
+class A: pass
+class B(A): pass
+@overload
+def f(x: int) -> None: pass
+@overload
+def f(x: B) -> str: pass # E: Overloaded function signatures 2 and 3 overlap with incompatible return types
+@overload
+def f(x: A) -> int: pass
+
+[case testOverloadWithTupleMatchingTypeVar]
+from typing import TypeVar, Generic, Tuple, overload
+
+T = TypeVar('T')
+
+class A(Generic[T]):
+    @overload
+    def f(self, arg: T) -> None:
+        pass
+    @overload
+    def f(self, arg: T, default: int) -> None:
+        pass
+
+b = A()  # type: A[Tuple[int, int]]
+b.f((0, 0))
+b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]"
+
+[case testSingleOverload]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+def f(a: str) -> None: pass
+[out]
+main:2: error: Single overload definition, multiple required
+main:4: error: Name 'f' already defined
+
+[case testSingleOverload2]
+from typing import overload
+def f(a: int) -> None: pass
+@overload
+def f(a: str) -> None: pass
+[out]
+main:3: error: Name 'f' already defined
+main:3: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloads]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+1
+@overload
+def f(a: str) -> None: pass
+[out]
+main:2: error: Single overload definition, multiple required
+main:5: error: Name 'f' already defined
+main:5: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloadsMissingFirstOverload]
+from typing import overload
+def f(a: int) -> None: pass
+1
+@overload
+def f(a: str) -> None: pass
+[out]
+main:4: error: Name 'f' already defined
+main:4: error: Single overload definition, multiple required
+
+[case testNonconsecutiveOverloadsMissingLaterOverload]
+from typing import overload
+@overload
+def f(a: int) -> None: pass
+1
+def f(a: str) -> None: pass
+[out]
+main:2: error: Single overload definition, multiple required
+main:5: error: Name 'f' already defined
+
+[case testOverloadTuple]
+from typing import overload, Tuple
+@overload
+def f(x: int, y: Tuple[str, ...]) -> None: pass
+@overload
+def f(x: int, y: str) -> None: pass
+f(1, ('2', '3'))
+f(1, (2, '3')) # E: Argument 2 to "f" has incompatible type "Tuple[int, str]"; expected Tuple[str, ...]
+f(1, ('2',))
+f(1, '2')
+f(1, (2, 3)) # E: Argument 2 to "f" has incompatible type "Tuple[int, int]"; expected Tuple[str, ...]
+x = ('2', '3')  # type: Tuple[str, ...]
+f(1, x)
+y = (2, 3)  # type: Tuple[int, ...]
+f(1, y) # E: Argument 2 to "f" has incompatible type Tuple[int, ...]; expected Tuple[str, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testCallableSpecificOverload]
+from typing import overload, Callable
+@overload
+def f(a: Callable[[], int]) -> None: pass
+@overload
+def f(a: str) -> None: pass
+f(0)  # E: No overload variant of "f" matches argument types [builtins.int]
diff --git a/test-data/unit/check-python2.test b/test-data/unit/check-python2.test
new file mode 100644
index 0000000..7a9446b
--- /dev/null
+++ b/test-data/unit/check-python2.test
@@ -0,0 +1,242 @@
+-- Type checker test cases for Python 2.x mode.
+
+
+[case testUnicode]
+u = u'foo'
+u = unicode()
+s = ''
+s = u'foo' # E: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+s = b'foo'
+[builtins_py2 fixtures/python2.pyi]
+
+[case testTypeVariableUnicode]
+from typing import TypeVar
+T = TypeVar(u'T')
+
+[case testNamedTupleUnicode]
+from typing import NamedTuple
+from collections import namedtuple
+N = NamedTuple(u'N', [(u'x', int)])
+n = namedtuple(u'n', u'x y')
+
+[builtins fixtures/dict.pyi]
+
+[case testPrintStatement]
+print ''() # E: "str" not callable
+print 1, 1() # E: "int" not callable
+
+[case testPrintStatementWithTarget]
+class A:
+    def write(self, s):
+        # type: (str) -> None
+        pass
+
+print >>A(), ''
+print >>None, ''
+print >>1, '' # E: "int" has no attribute "write"
+print >>(None + ''), None # E: Unsupported left operand type for + (None)
+
+[case testDivision]
+class A:
+    def __div__(self, x):
+        # type: (int) -> str
+        pass
+s = A() / 1
+s = ''
+s = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testStrUnicodeCompatibility]
+import typing
+def f(x):
+    # type: (unicode) -> None
+    pass
+f('')
+f(u'')
+f(b'')
+[builtins_py2 fixtures/python2.pyi]
+
+[case testStaticMethodWithCommentSignature]
+class A:
+    @staticmethod
+    def f(x): # type: (int) -> str
+        return ''
+A.f(1)
+A.f('') # E: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+[builtins_py2 fixtures/staticmethod.pyi]
+
+[case testRaiseTuple]
+import typing
+raise BaseException, "a"
+raise BaseException, "a", None
+[builtins_py2 fixtures/exception.pyi]
+
+[case testTryExceptWithTuple]
+try:
+    None
+except BaseException, e:
+    e() # E: "BaseException" not callable
+[builtins_py2 fixtures/exception.pyi]
+
+[case testAlternateNameSuggestions]
+class Foo(object):
+    def say_hello(self):
+        pass
+    def say_hell(self):
+        pass
+    def say_hullo(self):
+        pass
+    def say_goodbye(self):
+        pass
+    def go_away(self):
+        pass
+    def go_around(self):
+        pass
+    def append(self):
+        pass
+    def extend(self):
+        pass
+    def _add(self):
+        pass
+
+f = Foo()
+f.say_hallo() # E: "Foo" has no attribute "say_hallo"; maybe "say_hullo", "say_hello", or "say_hell"?
+f.go_array() # E: "Foo" has no attribute "go_array"; maybe "go_away"?
+f.add() # E: "Foo" has no attribute "add"; maybe "append", "extend", or "_add"?
+
+[case testTupleArgListDynamicallyTyped]
+def f(x, (y, z)):
+    x = y + z
+f(1, 1)
+f(1, (1, 2))
+
+[case testTupleArgListAnnotated]
+from typing import Tuple
+def f(x, (y, z)): # type: (object, Tuple[int, str]) -> None
+    x() # E
+    y() # E
+    z() # E
+f(object(), (1, ''))
+f(1, 1) # E
+[builtins_py2 fixtures/tuple.pyi]
+[out]
+main:3: error: "object" not callable
+main:4: error: "int" not callable
+main:5: error: "str" not callable
+main:7: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, str]"
+
+[case testNestedTupleArgListAnnotated]
+from typing import Tuple
+def f(x, (y, (a, b))): # type: (object, Tuple[int, Tuple[str, int]]) -> None
+    x() # E
+    y() # E
+    a() # E
+    b() # E
+f(object(), (1, ('', 2)))
+f(1, 1) # E
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: "object" not callable
+main:4: error: "int" not callable
+main:5: error: "str" not callable
+main:6: error: "int" not callable
+main:8: error: Argument 2 to "f" has incompatible type "int"; expected "Tuple[int, Tuple[str, int]]"
+
+[case testBackquoteExpr]
+`1`.x # E: "str" has no attribute "x"
+
+[case testPython2OnlyStdLibModuleWithoutStub]
+import asyncio
+import Bastion
+[out]
+main:1: error: Cannot find module named 'asyncio'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: No library stub file for standard library module 'Bastion'
+main:2: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testImportFromPython2Builtin]
+from __builtin__ import int as i
+x = 1 # type: i
+y = '' # type: i  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportPython2Builtin]
+import __builtin__
+x = 1 # type: __builtin__.int
+y = '' # type: __builtin__.int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportAsPython2Builtin]
+import __builtin__ as bi
+x = 1 # type: bi.int
+y = '' # type: bi.int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportFromPython2BuiltinOverridingDefault]
+from __builtin__ import int
+x = 1 # type: int
+y = '' # type: int  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+-- Copied from check-functions.test
+[case testEllipsisWithArbitraryArgsOnBareFunctionInPython2]
+def f(x, y, z): # type: (...) -> None
+    pass
+
+-- Copied from check-functions.test
+[case testEllipsisWithSomethingAfterItFailsInPython2]
+def f(x, y, z): # type: (..., int) -> None
+    pass
+[out]
+main:1: error: Ellipses cannot accompany other argument types in function type signature.
+
+[case testLambdaTupleArgInPython2]
+f = lambda (x, y): x + y
+f((0, 0))
+[out]
+
+[case testLambdaSingletonTupleArgInPython2]
+f = lambda (x,): x + 1
+f((0,))
+[out]
+
+[case testLambdaNoTupleArgInPython2]
+f = lambda (x): x + 1
+f(0)
+[out]
+
+[case testDefTupleEdgeCasesPython2]
+def f((x,)): return x
+def g((x)): return x
+f(0) + g(0)
+[out]
+
+[case testLambdaAsSortKeyForTuplePython2]
+from typing import Any, Tuple, Callable
+def bar(key):
+    # type: (Callable[[Tuple[int, int]], int]) -> int
+    pass
+def foo():
+    # type: () -> int
+    return bar(key=lambda (a, b): a)
+[out]
+
+[case testImportBuiltins]
+# flags: --fast-parser
+import __builtin__
+__builtin__.str
+
+[case testUnicodeAlias]
+from typing import List
+Alias = List[u'Foo']
+class Foo: pass
+[builtins_py2 fixtures/python2.pyi]
+
+[case testExec]
+exec('print 1 + 1')
+
+[case testUnicodeDocStrings]
+# flags: --python-version=2.7
+__doc__ = u"unicode"
+
+class A:
+    u"unicode"
+
+def f():
+    # type: () -> None
+    u"unicode"
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
new file mode 100644
index 0000000..98bcfa1
--- /dev/null
+++ b/test-data/unit/check-selftype.test
@@ -0,0 +1,358 @@
+[case testSelfTypeInstance]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+reveal_type(A().copy)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(B().copy)  # E: Revealed type is 'def () -> __main__.B*'
+reveal_type(A().copy())  # E: Revealed type is '__main__.A*'
+reveal_type(B().copy())  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeStaticAccess]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+# Erased instances appear on reveal_type; unrelated to self type
+def f(a: A) -> None: pass
+f(A.copy(A()))
+f(A.copy(B()))
+f(B.copy(B()))
+
+# TODO: make it an error
+# f(B.copy(A()))
+
+def g(a: B) -> None: pass
+g(A.copy(A()))  # E: Argument 1 to "g" has incompatible type "A"; expected "B"
+g(A.copy(B()))
+g(B.copy(B()))
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeReturn]
+from typing import TypeVar, Type
+
+R = TypeVar('R')
+def _type(self: R) -> Type[R]: pass
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    def copy(self: T) -> T:
+        if B():
+            return A()  # E: Incompatible return value type (got "A", expected "T")
+        elif A():
+            return B()  # E: Incompatible return value type (got "B", expected "T")
+        reveal_type(_type(self))  # E: Revealed type is 'Type[T`-1]'
+        return reveal_type(_type(self)())  # E: Revealed type is 'T`-1'
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C:
+    def __init__(self, a: int) -> None: pass
+
+    def copy(self: Q) -> Q:
+        if self:
+            return reveal_type(_type(self)(1))  # E: Revealed type is 'Q`-1'
+        else:
+            return _type(self)()  # E: Too few arguments for "C"
+
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeClass]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound='A')
+
+class A:
+    @classmethod
+    def new(cls: Type[T]) -> T:
+        return reveal_type(cls())  # E: Revealed type is 'T`-1'
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C:
+    def __init__(self, a: int) -> None: pass
+
+    @classmethod
+    def new(cls: Type[Q]) -> Q:
+        if cls:
+            return cls(1)
+        else:
+            return cls()  # E: Too few arguments for "C"
+
+
+reveal_type(A.new)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(B.new)  # E: Revealed type is 'def () -> __main__.B*'
+reveal_type(A.new())  # E: Revealed type is '__main__.A*'
+reveal_type(B.new())  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeOverride]
+from typing import TypeVar, cast
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+class B(A):
+    pass
+
+Q = TypeVar('Q', bound='C', covariant=True)
+class C(A):
+    def copy(self: Q) -> Q: pass
+
+reveal_type(C().copy)  # E: Revealed type is 'def () -> __main__.C*'
+reveal_type(C().copy())  # E: Revealed type is '__main__.C*'
+reveal_type(cast(A, C()).copy)  # E: Revealed type is 'def () -> __main__.A*'
+reveal_type(cast(A, C()).copy())  # E: Revealed type is '__main__.A*'
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeSuper]
+from typing import TypeVar, cast
+
+T = TypeVar('T', bound='A', covariant=True)
+
+class A:
+    def copy(self: T) -> T: pass
+
+Q = TypeVar('Q', bound='B', covariant=True)
+class B(A):
+    def copy(self: Q) -> Q:
+        reveal_type(self)  # E: Revealed type is 'Q`-1'
+        reveal_type(super().copy)  # E: Revealed type is 'def () -> Q`-1'
+        return super().copy()
+
+[builtins fixtures/bool.pyi]
+
+[case testSelfTypeRecursiveBinding]
+from typing import TypeVar, Callable, Type
+
+T = TypeVar('T', bound='A', covariant=True)
+class A:
+    # TODO: This is potentially unsafe, as we use T in an argument type
+    def copy(self: T, factory: Callable[[T], T]) -> T:
+        return factory(self)
+
+    @classmethod
+    def new(cls: Type[T], factory: Callable[[T], T]) -> T:
+        reveal_type(cls)   # E: Revealed type is 'Type[T`-1]'
+        reveal_type(cls())   # E: Revealed type is 'T`-1'
+        cls(2)  # E: Too many arguments for "A"
+        return cls()
+
+class B(A):
+    pass
+
+reveal_type(A().copy)  # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
+reveal_type(B().copy)  # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
+reveal_type(A.new)  # E: Revealed type is 'def (factory: def (__main__.A*) -> __main__.A*) -> __main__.A*'
+reveal_type(B.new)  # E: Revealed type is 'def (factory: def (__main__.B*) -> __main__.B*) -> __main__.B*'
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeBound]
+from typing import TypeVar, Callable, cast
+
+TA = TypeVar('TA', bound='A', covariant=True)
+
+class A:
+    def copy(self: TA) -> TA:
+        pass
+
+class C(A):
+    def copy(self: C) -> C:
+        pass
+
+class D(A):
+   def copy(self: A) -> A:  # E: Return type of "copy" incompatible with supertype "A"
+       pass
+
+TB = TypeVar('TB', bound='B', covariant=True)
+class B(A):
+    x = 1
+    def copy(self: TB) -> TB:
+        reveal_type(self.x)  # E: Revealed type is 'builtins.int'
+        return cast(TB, None)
+
+[builtins fixtures/bool.pyi]
+
+-- # TODO: fail for this
+-- [case testSelfTypeBare]
+-- from typing import TypeVar, Type
+--
+-- T = TypeVar('T', bound='E')
+--
+-- class E:
+--     def copy(self: T, other: T) -> T: pass
+
+[case testSelfTypeClone]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound='C')
+
+class C:
+    def copy(self: T) -> T:
+        return self
+
+    @classmethod
+    def new(cls: Type[T]) -> T:
+        return cls()
+
+def clone(arg: T) -> T:
+    reveal_type(arg.copy)  # E: Revealed type is 'def () -> T`-1'
+    return arg.copy()
+
+
+def make(cls: Type[T]) -> T:
+    reveal_type(cls.new)  # E: Revealed type is 'def () -> T`-1'
+    return cls.new()
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeGeneric]
+from typing import TypeVar
+
+T = TypeVar('T', int, str)
+
+class A:
+    pass
+
+class B(A):
+    def __init__(self, arg: T) -> None:
+        super(B, self).__init__()
+
+[case testSelfTypeNonsensical]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound=str)
+class A:
+    def foo(self: T) -> T:   # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.A'
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[T]) -> T:  # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.A]'
+        return cls()
+
+Q = TypeVar('Q', bound='B')
+class B:
+    def foo(self: Q) -> Q:
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[Q]) -> Q:
+        return cls()
+
+class C:
+    def foo(self: C) -> C: return self
+
+    @classmethod
+    def cfoo(cls: Type[C]) -> C:
+        return cls()
+
+class D:
+    def foo(self: str) -> str:  # E: The erased type of self 'builtins.str' is not a supertype of its class '__main__.D'
+        return self
+
+    @staticmethod
+    def bar(self: str) -> str:
+        return self
+
+    @classmethod
+    def cfoo(cls: Type[str]) -> str:  # E: The erased type of self 'Type[builtins.str]' is not a supertype of its class 'Type[__main__.D]'
+        return cls()
+
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeLambdaDefault]
+from typing import Callable
+class C:
+    @classmethod
+    def foo(cls,
+            arg: Callable[[int], str] = lambda a: ''
+            ) -> None:
+        pass
+
+    def bar(self,
+            arg: Callable[[int], str] = lambda a: ''
+            ) -> None:
+        pass
+[builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeNew]
+from typing import TypeVar, Type
+
+T = TypeVar('T', bound=A)
+class A:
+    def __new__(cls: Type[T]) -> T:
+        return cls()
+
+    def __init_subclass__(cls: Type[T]) -> None:
+        pass
+
+class B:
+    def __new__(cls: Type[T]) -> T:  # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
+        return cls()
+
+    def __init_subclass__(cls: Type[T]) -> None:  # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class 'Type[__main__.B]'
+        pass
+
+class C:
+    def __new__(cls: Type[C]) -> C:
+        return cls()
+
+    def __init_subclass__(cls: Type[C]) -> None:
+        pass
+
+class D:
+    def __new__(cls: D) -> D:  # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
+        return cls
+
+    def __init_subclass__(cls: D) -> None:  # E: The erased type of self '__main__.D' is not a supertype of its class 'Type[__main__.D]'
+        pass
+
+class E:
+    def __new__(cls) -> E:
+        reveal_type(cls)  # E: Revealed type is 'def () -> __main__.E'
+        return cls()
+
+    def __init_subclass__(cls) -> None:
+        reveal_type(cls)  # E: Revealed type is 'def () -> __main__.E'
+
+[case testSelfTypeProperty]
+from typing import TypeVar
+
+T = TypeVar('T', bound='A')
+
+class A:
+    @property
+    def member(self: T) -> T:
+        pass
+
+class B(A):
+    pass
+
+reveal_type(A().member)  # E: Revealed type is '__main__.A*'
+reveal_type(B().member)  # E: Revealed type is '__main__.B*'
+
+[builtins fixtures/property.pyi]
diff --git a/test-data/unit/check-semanal-error.test b/test-data/unit/check-semanal-error.test
new file mode 100644
index 0000000..0a07829
--- /dev/null
+++ b/test-data/unit/check-semanal-error.test
@@ -0,0 +1,81 @@
+-- Type checking after an error during semantic analysis
+-- -----------------------------------------------------
+--
+-- This tests both the semantic analyzer (that it does not generate
+-- corrupt state on error) and the type checker (that it can deal with
+-- whatever state the semantic analyzer sets up).
+
+-- TODO:
+--  - invalid type in annotation
+--  - invalid function comment type annotation
+--  - invalid multiple assignment type annotation
+--  - using a type variable as a value
+--  - using special names defined in typing as values
+
+[case testMissingModuleImport1]
+import m # E
+m.foo()
+m.x = m.y
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: "int" not callable
+
+[case testMissingModuleImport2]
+from m import x # E
+x.foo()
+x.a = x.b
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:4: error: "int" not callable
+
+[case testMissingModuleImport3]
+from m import * # E
+x # E
+1() # E
+[out]
+main:1: error: Cannot find module named 'm'
+main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Name 'x' is not defined
+main:3: error: "int" not callable
+
+[case testInvalidBaseClass1]
+class A(X): # E: Name 'X' is not defined
+    x = 1
+A().foo(1)
+A().x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testInvalidBaseClass2]
+X = 1
+class A(X): # E
+    x = 1
+A().foo(1)
+A().x = '' # E
+[out]
+main:2: error: Invalid type "__main__.X"
+main:2: error: Invalid base class
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+
+[case testInvalidNumberOfTypeArgs]
+from typing import TypeVar
+T = TypeVar('T')
+class C:  # Forgot to add type params here
+    def __init__(self, t: T) -> None: pass
+c = C(t=3)  # type: C[int]  # E: "C" expects no type arguments, but 1 given
+
+[case testBreakOutsideLoop]
+break # E: 'break' outside loop
+
+[case testContinueOutsideLoop]
+continue # E: 'continue' outside loop
+
+[case testYieldOutsideFunction]
+yield # E: 'yield' outside function
+
+[case testYieldFromOutsideFunction]
+x = 1
+yield from x # E: 'yield from' outside function
diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test
new file mode 100644
index 0000000..1fa0cdc
--- /dev/null
+++ b/test-data/unit/check-statements.test
@@ -0,0 +1,1451 @@
+-- Return statement
+-- ----------------
+
+
+[case testReturnValue]
+import typing
+def f() -> 'A':
+    return A()
+def g() -> 'B':
+    return A()
+class A:
+    pass
+class B:
+    pass
+[out]
+main:5: error: Incompatible return value type (got "A", expected "B")
+
+[case testReturnSubtype]
+import typing
+def f() -> 'B':
+    return A()
+def g() -> 'A':
+    return B()
+class A:
+    pass
+class B(A):
+    pass
+[out]
+main:3: error: Incompatible return value type (got "A", expected "B")
+
+[case testReturnWithoutAValue]
+import typing
+def f() -> 'A':
+    return
+def g() -> None:
+    return
+class A:
+    pass
+[out]
+main:3: error: Return value expected
+
+[case testReturnNoneInFunctionReturningNone]
+import typing
+def f() -> None:
+    return None
+def g() -> None:
+    return f()  # E: No return value expected
+[out]
+
+[case testReturnInGenerator]
+from typing import Generator
+def f() -> Generator[int, None, str]:
+    yield 1
+    return "foo"
+[out]
+
+[case testEmptyReturnInGenerator]
+from typing import Generator
+def f() -> Generator[int, None, str]:
+    yield 1
+    return  # E: Return value expected
+[out]
+
+[case testEmptyReturnInNoneTypedGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+    return
+[out]
+
+[case testNonEmptyReturnInNoneTypedGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+    return 42  # E: No return value expected
+[out]
+
+[case testReturnInIterator]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    return "foo"
+[out]
+
+
+-- If statement
+-- ------------
+
+
+[case testIfStatement]
+
+a = None # type: A
+a2 = None # type: A
+a3 = None # type: A
+b = None # type: bool
+if a:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+elif a2:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+elif a3:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+else:
+    a = b # E: Incompatible types in assignment (expression has type "bool", variable has type "A")
+if b:
+    pass
+elif b:
+    pass
+if b:
+    pass
+
+class A: pass
+[builtins fixtures/bool.pyi]
+
+
+-- Loops
+-- -----
+
+
+[case testWhileStatement]
+
+a = None # type: A
+b = None # type: bool
+while a:
+    a = b    # Fail
+else:
+    a = b    # Fail
+while b:
+    b = b
+
+class A: pass
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A")
+
+[case testForStatement]
+
+a = None # type: A
+b = None # type: object
+for a in [A()]:
+    a = b    # Fail
+else:
+    a = b    # Fail
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testBreakStatement]
+import typing
+while None:
+    break
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testContinueStatement]
+import typing
+while None:
+    continue
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testForStatementTypeComments]
+# flags: --fast-parser
+from typing import List, Union
+x = []  # type: List[int]
+
+for y in x:  # type: str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+for z in x:  # type: int
+    pass
+
+for w in x:  # type: Union[int, str]
+    reveal_type(w)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+for v in x:  # type: int, int  # E: Invalid tuple literal type
+    pass
+[builtins fixtures/list.pyi]
+
+[case testForStatementMultipleTypeComments]
+# flags: --fast-parser
+from typing import List, Tuple
+x = []  # type: List[Tuple[int, int]]
+
+for y in x:  # type: int, int  # E: Invalid tuple literal type
+    pass
+
+for z in x:  # type: Tuple[int, int]
+    pass
+
+for w,v in x:  # type: int, str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+for a, b in x:  # type: int, int, int  # E: Incompatible number of tuple items
+    pass
+[builtins fixtures/list.pyi]
+
+
+-- Operator assignment
+-- -------------------
+
+
+[case testPlusAssign]
+
+a, b, c = None, None, None # type: (A, B, C)
+a += b   # Fail
+b += a   # Fail
+c += a   # Fail
+a += c
+
+class A:
+    def __add__(self, x: 'C') -> 'A': pass
+
+class B:
+    def __add__(self, x: A) -> 'C': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for + ("A" and "B")
+main:4: error: Result type of + incompatible in assignment
+main:5: error: Unsupported left operand type for + ("C")
+
+[case testMinusAssign]
+
+a, b, c = None, None, None # type: (A, B, C)
+a -= b   # Fail
+b -= a   # Fail
+c -= a   # Fail
+a -= c
+
+class A:
+    def __sub__(self, x: 'C') -> 'A': pass
+
+class B:
+    def __sub__(self, x: A) -> 'C': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for - ("A" and "B")
+main:4: error: Result type of - incompatible in assignment
+main:5: error: Unsupported left operand type for - ("C")
+
+[case testMulAssign]
+
+a, c = None, None # type: (A, C)
+a *= a   # Fail
+c *= a   # Fail
+a *= c
+
+class A:
+    def __mul__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for * ("A" and "A")
+main:4: error: Unsupported left operand type for * ("C")
+
+[case testMatMulAssign]
+a, c = None, None # type: (A, C)
+a @= a   # E: Unsupported operand types for @ ("A" and "A")
+c @= a   # E: Unsupported left operand type for @ ("C")
+a @= c
+
+class A:
+    def __matmul__(self, x: 'C') -> 'A': pass
+
+class C: pass
+
+[case testDivAssign]
+
+a, c = None, None # type: (A, C)
+a /= a   # Fail
+c /= a   # Fail
+a /= c
+
+class A:
+    def __truediv__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for / ("A" and "A")
+main:4: error: Unsupported left operand type for / ("C")
+
+[case testPowAssign]
+
+a, c = None, None # type: (A, C)
+a **= a   # Fail
+c **= a   # Fail
+a **= c
+
+class A:
+    def __pow__(self, x: 'C') -> 'A': pass
+
+class C: pass
+[out]
+main:3: error: Unsupported operand types for ** ("A" and "A")
+main:4: error: Unsupported left operand type for ** ("C")
+
+[case testSubtypesInOperatorAssignment]
+
+a, b = None, None # type: (A, B)
+b += b
+b += a
+a += b
+
+class A:
+    def __add__(self, x: 'A') -> 'B': pass
+
+class B(A): pass
+[out]
+
+[case testAdditionalOperatorsInOpAssign]
+
+a, c = None, None # type: (A, C)
+a &= a  # Fail
+a >>= a # Fail
+a //= a # Fail
+a &= c
+a >>= c
+a //= c
+class A:
+    def __and__(self, x: 'C') -> 'A': pass
+    def __rshift__(self, x: 'C') -> 'A': pass
+    def __floordiv__(self, x: 'C') -> 'A': pass
+class C: pass
+[out]
+main:3: error: Unsupported operand types for & ("A" and "A")
+main:4: error: Unsupported operand types for >> ("A" and "A")
+main:5: error: Unsupported operand types for // ("A" and "A")
+
+[case testInplaceOperatorMethods]
+import typing
+class A:
+    def __iadd__(self, x: int) -> 'A': pass
+    def __imul__(self, x: str) -> 'A': pass
+    def __imatmul__(self, x: str) -> 'A': pass
+a = A()
+a += 1
+a *= ''
+a @= ''
+a += '' # E: Argument 1 to "__iadd__" of "A" has incompatible type "str"; expected "int"
+a *= 1  # E: Argument 1 to "__imul__" of "A" has incompatible type "int"; expected "str"
+a @= 1  # E: Argument 1 to "__imatmul__" of "A" has incompatible type "int"; expected "str"
+
+[case testInplaceSetitem]
+class A(object):
+    def __init__(self):
+        self.a = 0
+
+    def __iadd__(self, a):
+        # type: (int) -> A
+        self.a += 1
+        return self
+
+a = A()
+b = [a]
+b[0] += 1
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Assert statement
+-- ----------------
+
+
+[case testAssert]
+import typing
+assert None + None # Fail
+assert None
+[out]
+main:2: error: Unsupported left operand type for + (None)
+
+
+-- Exception handling
+-- ------------------
+
+
+[case testRaiseStatement]
+
+e = None # type: BaseException
+f = None # type: MyError
+a = None # type: A
+raise a # Fail
+raise e
+raise f
+class A: pass
+class MyError(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Exception must be derived from BaseException
+
+[case testRaiseClassobject]
+import typing
+class A: pass
+class MyError(BaseException): pass
+def f(): pass
+raise BaseException
+raise MyError
+raise A # E: Exception must be derived from BaseException
+raise object # E: Exception must be derived from BaseException
+raise f # E: Exception must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testRaiseFromStatement]
+
+e = None # type: BaseException
+f = None # type: MyError
+a = None # type: A
+raise e from a # E: Exception must be derived from BaseException
+raise e from e
+raise e from f
+class A: pass
+class MyError(BaseException): pass
+[builtins fixtures/exception.pyi]
+
+[case testRaiseFromClassobject]
+import typing
+class A: pass
+class MyError(BaseException): pass
+def f(): pass
+raise BaseException from BaseException
+raise BaseException from MyError
+raise BaseException from A # E: Exception must be derived from BaseException
+raise BaseException from object # E: Exception must be derived from BaseException
+raise BaseException from f # E: Exception must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testTryFinallyStatement]
+import typing
+try:
+    b = object() # type: A # Fail
+finally:
+    c = object() # type: A # Fail
+class A: pass
+[out]
+main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+[case testSimpleTryExcept]
+
+try:
+  pass
+except BaseException as e:
+  a, o = None, None # type: (BaseException, object)
+  e = a
+  e = o # Fail
+class A: pass
+class B: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+
+[case testTypeErrorInBlock]
+
+while object:
+  x = None # type: A
+  x = object()
+  x = B()
+class A: pass
+class B: pass
+[out]
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeErrorInvolvingBaseException]
+
+x, a = None, None # type: (BaseException, A)
+a = BaseException()  # Fail
+a = object()         # Fail
+x = object()         # Fail
+x = A()              # Fail
+x = BaseException()
+class A: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "BaseException", variable has type "A")
+main:4: error: Incompatible types in assignment (expression has type "object", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+main:6: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
+
+[case testSimpleTryExcept2]
+import typing
+try:
+  pass
+except BaseException as e:
+  e = object() # Fail
+  e = BaseException()
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+
+[case testBaseClassAsExceptionTypeInExcept]
+import typing
+try:
+  pass
+except Err as e:
+  e = BaseException() # Fail
+  e = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testMultipleExceptHandlers]
+import typing
+try:
+    pass
+except BaseException as e:
+    pass
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptStatement]
+import typing
+try:
+    a = B() # type: A       # Fail
+except BaseException as e:
+    e = A()             # Fail
+    e = Err()
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class A: pass
+class B: pass
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException")
+main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptWithinFunction]
+import typing
+def f() -> None:
+  try: pass
+  except BaseException as e:
+    e = object() # Fail
+    e = BaseException()
+  except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+main:5: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err")
+
+[case testTryExceptFlow]
+def f() -> None:
+  x = 1
+  try:
+    pass
+  except:
+    raise
+  x + 'a' # E: Unsupported left operand type for + ("int")
+[builtins fixtures/exception.pyi]
+[out]
+
+[case testTryWithElse]
+import typing
+try: pass
+except BaseException: pass
+else:
+  object(None) # E: Too many arguments for "object"
+[builtins fixtures/exception.pyi]
+
+[case testRedefinedFunctionInTryWithElse]
+def f() -> None: pass
+try:
+    pass
+except BaseException:
+    f2 = f
+else:
+    def f2() -> str: pass
+try:
+    pass
+except BaseException:
+    f3 = f
+else:
+    def f3() -> None: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:7: error: Incompatible redefinition (redefinition with type Callable[[], str], original type Callable[[], None])
+
+[case testExceptWithoutType]
+import typing
+try:
+    -None # E: Unsupported operand type for unary - (None)
+except:
+    ~None # E: Unsupported operand type for ~ (None)
+[builtins fixtures/exception.pyi]
+
+[case testRaiseWithoutArgument]
+import typing
+try:
+    None
+except:
+    raise
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes]
+import typing
+class E1(BaseException): pass
+class E2(E1): pass
+try:
+    pass
+except (E1, E2): pass
+except (E1, object): pass # E: Exception type must be derived from BaseException
+except (object, E2): pass # E: Exception type must be derived from BaseException
+except (E1, (E2,)): pass  # E: Exception type must be derived from BaseException
+
+except (E1, E2): pass
+except ((E1, E2)): pass
+except (((E1, E2))): pass
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes2]
+import typing
+class E1(BaseException): pass
+class E2(E1): pass
+try:
+    pass
+except (E1, E2) as e1:
+    x = e1 # type: E1
+    y = e1 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
+except (E2, E1) as e2:
+    a = e2 # type: E1
+    b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2")
+except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException
+    pass
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithMultipleTypes3]
+import typing
+class E1(BaseException): pass
+class E1_1(E1): pass
+class E1_2(E1): pass
+try: pass
+except (E1, E1_1, E1_2) as e1:
+    x = e1 # type: E1
+    y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
+    z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
+except (E1_1, E1_2) as e2:
+    a = e2 # type: E1
+    b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
+    c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
+[builtins fixtures/exception.pyi]
+
+[case testExceptWithAnyTypes]
+from typing import Any
+
+E1 = None  # type: Any
+class E2(BaseException): pass
+class NotBaseDerived: pass
+
+try:
+    pass
+except BaseException as e1:
+    reveal_type(e1)  # E: Revealed type is 'builtins.BaseException'
+except (E1, BaseException) as e2:
+    reveal_type(e2)  # E: Revealed type is 'Any'
+except (E1, E2) as e3:
+    reveal_type(e3)  # E: Revealed type is 'Any'
+except (E1, E2, BaseException) as e4:
+    reveal_type(e4)  # E: Revealed type is 'Any'
+
+try: pass
+except E1 as e1:
+    reveal_type(e1)  # E: Revealed type is 'Any'
+except E2 as e2:
+    reveal_type(e2)  # E: Revealed type is '__main__.E2'
+except NotBaseDerived as e3:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E1) as e4:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E2) as e5:  # E: Exception type must be derived from BaseException
+    pass
+except (NotBaseDerived, E1, E2) as e6:  # E: Exception type must be derived from BaseException
+    pass
+except (E1, E2, NotBaseDerived) as e6:  # E: Exception type must be derived from BaseException
+    pass
+[builtins fixtures/exception.pyi]
+
+[case testReuseTryExceptionVariable]
+import typing
+class E1(BaseException): pass
+class E2(BaseException): pass
+try: pass
+except E1 as e: pass
+try: pass
+except E1 as e: pass
+try: pass
+except E2 as e: pass
+e + 1 # E: Trying to read deleted variable 'e'
+e = E1() # E: Assignment to variable 'e' outside except: block
+[builtins fixtures/exception.pyi]
+
+[case testReuseDefinedTryExceptionVariable]
+import typing
+class E1(BaseException): pass
+class E2(BaseException): pass
+e = 1
+e = 1
+try: pass
+except E1 as e: pass
+e = 1 # E: Assignment to variable 'e' outside except: block
+e = E1() # E: Assignment to variable 'e' outside except: block
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode1]
+def f(*a: BaseException) -> int:
+    x
+    try: pass
+    except BaseException as err: pass
+    try: pass
+    except BaseException as err: f(err)
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode2]
+def f(*a: BaseException) -> int:
+    try: pass
+    except BaseException as err: pass
+    x
+    try: pass
+    except BaseException as err: f(err)
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode3]
+def f(*a: BaseException) -> int:
+    try: pass
+    except BaseException as err: pass
+    try: pass
+    except BaseException as err: f(err)
+    x
+x = f()
+[builtins fixtures/exception.pyi]
+
+[case testExceptionVariableReuseInDeferredNode4]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    x
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:11: error: Revealed type is 'builtins.int'
+main:16: error: Revealed type is 'builtins.str'
+
+[case testExceptionVariableReuseInDeferredNode5]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    x
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:10: error: Revealed type is 'builtins.int'
+main:16: error: Revealed type is 'builtins.str'
+
+[case testExceptionVariableReuseInDeferredNode6]
+class EA(BaseException):
+    a = None  # type: int
+class EB(BaseException):
+    b = None  # type: str
+def f(*arg: BaseException) -> int:
+    try: pass
+    except EA as err:
+        f(err)
+        a = err.a
+        reveal_type(a)
+    try: pass
+    except EB as err:
+        f(err)
+        b = err.b
+        reveal_type(b)
+    x
+x = f()
+[builtins fixtures/exception.pyi]
+[out]
+main:10: error: Revealed type is 'builtins.int'
+main:15: error: Revealed type is 'builtins.str'
+
+[case testArbitraryExpressionAsExceptionType]
+import typing
+a = BaseException
+try: pass
+except a as b:
+    b = BaseException()
+    b = object() # E: Incompatible types in assignment (expression has type "object", variable has type "BaseException")
+[builtins fixtures/exception.pyi]
+
+[case testInvalidExceptionCallable]
+import typing
+def exc() -> BaseException: pass
+try: pass
+except exc as e: pass             # E: Exception type must be derived from BaseException
+except BaseException() as b: pass # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testTupleValueAsExceptionType]
+import typing
+def exc() -> BaseException: pass
+class E1(BaseException): pass
+class E1_1(E1): pass
+class E1_2(E1): pass
+
+exs1 = (E1, E1_1, E1_2)
+try: pass
+except exs1 as e1:
+    x = e1 # type: E1
+    y = e1 # type: E1_1 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_1")
+    z = e1 # type: E1_2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E1_2")
+
+exs2 = (E1_1, E1_2)
+try: pass
+except exs2 as e2:
+    a = e2 # type: E1
+    b = e2 # type: E1_1 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_1")
+    c = e2 # type: E1_2 # E: Incompatible types in assignment (expression has type "Union[E1_1, E1_2]", variable has type "E1_2")
+
+exs3 = (E1, (E1_1, (E1_2,)))
+try: pass
+except exs3 as e3: pass  # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testInvalidTupleValueAsExceptionType]
+import typing
+def exc() -> BaseException: pass
+class E1(BaseException): pass
+class E2(E1): pass
+
+exs1 = (E1, E2, int)
+try: pass
+except exs1 as e: pass # E: Exception type must be derived from BaseException
+[builtins fixtures/exception.pyi]
+
+[case testOverloadedExceptionType]
+from typing import overload
+class E(BaseException):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x) -> None: pass
+try:
+    pass
+except E as e:
+    e = E()
+    e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "E")
+[builtins fixtures/exception.pyi]
+
+[case testExceptionWithAnyBaseClass]
+from typing import Any
+E = None  # type: Any
+class EE(E): pass
+raise EE()
+raise EE
+[builtins fixtures/exception.pyi]
+
+[case testExceptionIsType]
+from typing import Type
+class B(BaseException): pass
+def f(e: Type[B]):
+    try: pass
+    except e: pass
+def g(e: Type[BaseException]):
+    try: pass
+    except e as err:
+        reveal_type(err)
+def h(e: Type[int]):
+    try: pass
+    except e: pass
+[builtins fixtures/exception.pyi]
+[out]
+main:9: error: Revealed type is 'builtins.BaseException'
+main:12: error: Exception type must be derived from BaseException
+
+
+-- Del statement
+-- -------------
+
+
+[case testDelStmtWithIndex]
+a, b = None, None # type: (A, B)
+del b[a]
+del b[b] # E: Argument 1 to "__delitem__" of "B" has incompatible type "B"; expected "A"
+del a[a] # E: "A" has no attribute "__delitem__"
+del a[b] # E: "A" has no attribute "__delitem__"
+class B:
+  def __delitem__(self, index: 'A'): pass
+class A: pass
+
+[case testDelStmtWithAttribute]
+class A:
+    def f(self): pass
+    x = 0
+a = A()
+del a.f
+del a.x
+del a.z # E: "A" has no attribute "z"
+
+[case testDelStatementWithTuple]
+class A:
+    x = 0
+a = A()
+del a.x, a.y # E: "A" has no attribute "y"
+
+
+[case testDelStatementWithAssignmentSimple]
+a = 1
+a + 1
+del a
+a + 1 # E: Trying to read deleted variable 'a'
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithAssignmentTuple]
+a = 1
+b = 1
+del (a, b)
+b + 1 # E: Trying to read deleted variable 'b'
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithAssignmentClass]
+class C:
+    a = 1
+
+c = C()
+c.a = 1
+c.a + 1
+del c.a
+c.a + 1
+[builtins fixtures/ops.pyi]
+
+[case testDelStatementWithConditions]
+x = 5
+del x
+if x: ...  # E: Trying to read deleted variable 'x'
+
+def f(x):
+    return x
+
+if 0: ...
+elif f(x): ...  # E: Trying to read deleted variable 'x'
+
+while x == 5: ...  # E: Trying to read deleted variable 'x'
+
+-- Yield statement
+-- ---------------
+
+
+[case testSimpleYield]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    yield '' # E: Incompatible types in yield (actual type "str", expected type "int")
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningGenerator]
+from typing import Generator
+def f() -> Generator[int, None, None]:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningIterable]
+from typing import Iterable
+def f() -> Iterable[int]:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningObject]
+def f() -> object:
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldInFunctionReturningAny]
+from typing import Any
+def f() -> Any:
+    yield object()
+[out]
+
+[case testYieldInFunctionReturningFunction]
+from typing import Callable
+def f() -> Callable[[], None]: # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield object()
+[out]
+
+[case testYieldInDynamicallyTypedFunction]
+import typing
+def f():
+    yield f
+
+[case testWithInvalidInstanceReturnType]
+import typing
+def f() -> int: # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield 1
+[builtins fixtures/for.pyi]
+[out]
+
+[case testTypeInferenceContextAndYield]
+from typing import List, Iterator
+def f() -> 'Iterator[List[int]]':
+    yield []
+    yield [object()] # E: List item 0 has incompatible type "object"
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldAndReturnWithoutValue]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield 1
+    return
+[builtins fixtures/for.pyi]
+
+[case testYieldWithNoValue]
+from typing import Iterator
+def f() -> Iterator[None]:
+    yield
+[builtins fixtures/for.pyi]
+
+[case testYieldWithNoValueWhenValueRequired]
+from typing import Iterator
+def f() -> Iterator[int]:
+    yield  # E: Yield value expected
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldWithExplicitNone]
+from typing import Iterator
+def f() -> Iterator[None]:
+    yield None  # E: Incompatible types in yield (actual type None, expected type None)
+[builtins fixtures/for.pyi]
+[out]
+
+
+-- Yield from statement
+-- --------------------
+
+-- Iterables
+-- ----------
+
+[case testSimpleYieldFromWithIterator]
+from typing import Iterator
+def g() -> Iterator[str]:
+    yield '42'
+def h() -> Iterator[int]:
+    yield 42
+def f() -> Iterator[str]:
+    yield from g()
+    yield from h()  # E: Incompatible types in "yield from" (actual type "int", expected type "str")
+[out]
+
+[case testYieldFromAppliedToAny]
+from typing import Any
+def g() -> Any:
+    yield object()
+def f() -> Any:
+    yield from g()
+[out]
+
+[case testYieldFromInFunctionReturningFunction]
+from typing import Iterator, Callable
+def g() -> Iterator[int]:
+    yield 42
+def f() -> Callable[[], None]:  # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield from g()
+[out]
+
+[case testYieldFromNotIterableReturnType]
+from typing import Iterator
+def g() -> Iterator[int]:
+    yield 42
+def f() -> int:  # E: The return type of a generator function should be "Generator" or one of its supertypes
+    yield from g()
+[out]
+
+[case testYieldFromNotAppliedIterator]
+from typing import Iterator
+def g() -> int:
+    return 42
+def f() -> Iterator[int]:
+    yield from g()  # E: "yield from" can't be applied to "int"
+[out]
+
+[case testYieldFromCheckIncompatibleTypesTwoIterables]
+from typing import List, Iterator
+def g() -> Iterator[List[int]]:
+    yield [2, 3, 4]
+def f() -> Iterator[List[int]]:
+    yield from g()
+    yield from [1, 2, 3]  # E: Incompatible types in "yield from" (actual type "int", expected type List[int])
+[builtins fixtures/for.pyi]
+[out]
+
+[case testYieldFromNotAppliedToNothing]
+def h():
+    yield from  # E: invalid syntax
+[out]
+
+[case testYieldFromAndYieldTogether]
+from typing import Iterator
+def f() -> Iterator[str]:
+    yield "g1 ham"
+    yield from g()
+    yield "g1 eggs"
+def g() -> Iterator[str]:
+    yield "g2 spam"
+    yield "g2 more spam"
+[out]
+
+[case testYieldFromAny]
+from typing import Iterator
+def f(a):
+    b = yield from a
+    return b
+[out]
+
+-- With statement
+-- --------------
+
+
+[case testSimpleWith]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+with A():
+    object(A) # E: Too many arguments for "object"
+
+[case testWithStmtAndInvalidExit]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y) -> None: pass
+with A(): # E: Too many arguments for "__exit__" of "A"
+    pass
+
+[case testWithStmtAndMissingExit]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+with A(): # E: "A" has no attribute "__exit__"
+    pass
+
+[case testWithStmtAndInvalidEnter]
+import typing
+class A:
+    def __enter__(self, x) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+with A(): # E: Too few arguments for "__enter__" of "A"
+    pass
+
+[case testWithStmtAndMissingEnter]
+import typing
+class A:
+    def __exit__(self, x, y, z) -> None: pass
+with A(): # E: "A" has no attribute "__enter__"
+    pass
+
+[case testWithStmtAndMultipleExprs]
+import typing
+class A:
+    def __enter__(self) -> None: pass
+    def __exit__(self, x, y, z) -> None: pass
+class B:
+    def __enter__(self) -> None: pass
+with A(), B(): # E: "B" has no attribute "__exit__"
+    pass
+with B(), A(): # E: "B" has no attribute "__exit__"
+    pass
+
+[case testWithStmtAndResult]
+import typing
+class B: pass
+class A:
+    def __enter__(self) -> B: pass
+    def __exit__(self, x, y, z): pass
+with A() as b:
+    b = B()
+    b = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testWithStmtAndMultipleResults]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class C: pass
+class A(Generic[t]):
+    def __enter__(self) -> t: pass
+    def __exit__(self, x, y, z): pass
+a_b = A() # type: A[B]
+a_c = A() # type: A[C]
+with a_b as b, a_c as c:
+    b = B()
+    c = C()
+    b = c # E: Incompatible types in assignment (expression has type "C", variable has type "B")
+    c = b # E: Incompatible types in assignment (expression has type "B", variable has type "C")
+
+[case testWithStmtAndComplexTarget]
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, str]: pass
+    def __exit__(self, x, y, z): pass
+with A() as (a, b):
+    a = 1
+    b = ''
+    a = b # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/tuple.pyi]
+
+[case testWithStmtTypeComment]
+# flags: --fast-parser
+from typing import Union
+class A:
+    def __enter__(self) -> int: pass
+    def __exit__(self, x, y, z): pass
+
+with A():  # type: int  # E: Invalid type comment
+    pass
+
+with A() as a:  # type: int
+    pass
+
+with A() as b:  # type: str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+
+with A() as c:  # type: int, int  # E: Invalid tuple literal type
+    pass
+
+with A() as d:  # type: Union[int, str]
+    reveal_type(d)  # E: Revealed type is 'Union[builtins.int, builtins.str]'
+
+[case testWithStmtTupleTypeComment]
+# flags: --fast-parser
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, int]: pass
+    def __exit__(self, x, y, z): pass
+
+with A():
+    pass
+
+with A() as a:  # type: Tuple[int, int]
+    pass
+
+with A() as b:  # type: Tuple[int, str]  # E: Incompatible types in assignment (expression has type "Tuple[int, int]", variable has type "Tuple[int, str]")
+    pass
+
+with A() as (c, d):  # type: int, int
+    pass
+
+with A() as (e, f):  # type: Tuple[int, int]
+    pass
+
+with A() as (g, h):  # type: int  # E: Tuple type expected for multiple variables
+    pass
+
+with A() as (i, j):  # type: int, int, str  # E: Incompatible number of tuple items
+    pass
+
+with A() as (k, l):  # type: int, str  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    pass
+[builtins fixtures/tuple.pyi]
+
+[case testWithStmtComplexTypeComment]
+# flags: --fast-parser
+from typing import Tuple
+class A:
+    def __enter__(self) -> Tuple[int, int]: pass
+    def __exit__(self, x, y, z): pass
+
+class B:
+    def __enter__(self) -> str: pass
+    def __exit__(self, x, y, z): pass
+
+with A() as a, A() as (b, c), B() as d:  # type: Tuple[int, int], (int, int), str
+    pass
+
+with A() as e, A() as (f, g), B() as h:  # type: Tuple[int, int], Tuple[int, int], str
+    pass
+
+with A() as i, A() as (j, k), B() as l:  # type: (int, int), (int, int), str  # E: Invalid tuple literal type
+    pass
+
+with A(), A(), B() as m, A() as n, B(), B() as o:  # type: int, Tuple[int, int]  # E: Incompatible number of types for `with` targets
+    pass
+
+with A(), B(), B() as p, A(), A():  # type: str
+    pass
+[builtins fixtures/tuple.pyi]
+
+-- Chained assignment
+-- ------------------
+
+
+[case testChainedAssignment]
+import typing
+class A: pass
+class B: pass
+x = y = A()
+x = A()
+y = A()
+x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+y = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testChainedAssignment2]
+import typing
+def f() -> None:
+    x = 1
+    y = 'x'
+    x = y = 'x' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    x = y = 1   # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testChainedAssignmentWithType]
+
+x = y = None # type: int
+x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+x = 1
+y = 1
+
+
+-- Star assignment
+-- ---------------
+
+
+[case testAssignListToStarExpr]
+from typing import List
+bs, cs = None, None # type: List[A], List[B]
+*bs, b = bs
+*bs, c = cs  # E: Incompatible types in assignment (expression has type List[B], variable has type List[A])
+*ns, c = cs
+nc = cs
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Type aliases
+-- ------------
+
+
+[case testSimpleTypeAlias]
+import typing
+foo = int
+def f(x: foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testTypeAliasDefinedInAModule]
+import typing
+import m
+def f(x: m.foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+import typing
+foo = int
+
+[case testTypeAliasDefinedInAModule2]
+import typing
+from m import foo
+def f(x: foo) -> None: pass
+f(1)
+f('x') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+[file m.py]
+import typing
+foo = int
+
+
+-- nonlocal and global
+-- -------------------
+
+
+[case testTypeOfGlobalUsed]
+import typing
+g = A()
+def f() -> None:
+    global g
+    g = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeOfNonlocalUsed]
+import typing
+def f() -> None:
+    a = A()
+    def g() -> None:
+        nonlocal a
+        a = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:6: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+[case testTypeOfOuterMostNonlocalUsed]
+import typing
+def f() -> None:
+    a = A()
+    def g() -> None:
+        a = B()
+        def h() -> None:
+            nonlocal a
+            a = A()
+            a = B()
+
+class A(): pass
+class B(): pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "A", variable has type "B")
diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test
new file mode 100644
index 0000000..2993113
--- /dev/null
+++ b/test-data/unit/check-super.test
@@ -0,0 +1,109 @@
+-- Test cases for type checker related to super().
+
+
+-- Supertype member reference
+-- --------------------------
+
+
+[case testAccessingSupertypeMethod]
+
+class B:
+  def f(self) -> 'B': pass
+class A(B):
+  def f(self) -> 'A':
+    a, b = None, None # type: (A, B)
+    a = super().f() # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+    a = super().g() # E: "g" undefined in superclass
+    b = super().f()
+[out]
+
+[case testAccessingSuperTypeMethodWithArgs]
+from typing import Any
+class B:
+  def f(self, y: 'A') -> None: pass
+class A(B):
+  def f(self, y: Any) -> None:
+    a, b = None, None # type: (A, B)
+    super().f(b) # E: Argument 1 to "f" of "B" has incompatible type "B"; expected "A"
+    super().f(a)
+    self.f(b)
+    self.f(a)
+[out]
+
+[case testAccessingSuperInit]
+import typing
+class B:
+    def __init__(self, x: A) -> None: pass
+class A(B):
+  def __init__(self) -> None:
+    super().__init__(B(None)) # E: Argument 1 to "__init__" of "B" has incompatible type "B"; expected "A"
+    super().__init__()       # E: Too few arguments for "__init__" of "B"
+    super().__init__(A())
+[out]
+
+[case testAccessingSuperMemberWithDeepHierarchy]
+import typing
+class C:
+  def f(self) -> None: pass
+class B(C): pass
+class A(B):
+  def f(self) -> None:
+    super().g() # E: "g" undefined in superclass
+    super().f()
+[out]
+
+[case testAssignToBaseClassMethod]
+import typing
+class A:
+    def f(self) -> None: pass
+class B(A):
+    def g(self) -> None:
+        super().f = None
+[out]
+main:6: error: Invalid assignment target
+
+[case testSuperWithMultipleInheritance]
+import typing
+class A:
+  def f(self) -> None: pass
+class B:
+  def g(self, x: int) -> None: pass
+class C(A, B):
+    def f(self) -> None:
+        super().f()
+        super().g(1)
+        super().f(1) # E: Too many arguments for "f" of "A"
+        super().g() # E: Too few arguments for "g" of "B"
+        super().not_there() # E: "not_there" undefined in superclass
+[out]
+
+[case testSuperWithNew]
+class A:
+    def __new__(cls, x: int) -> 'A':
+        return object.__new__(cls)
+
+class B(A):
+    def __new__(cls, x: int, y: str = '') -> 'A':
+        super().__new__(cls, 1)
+        super().__new__(cls, 1, '')  # E: Too many arguments for "__new__" of "A"
+B('')  # E: Argument 1 to "B" has incompatible type "str"; expected "int"
+B(1)
+B(1, 'x')
+[builtins fixtures/__new__.pyi]
+
+[case testSuperWithUnknownBase]
+from typing import Any
+B = None  # type: Any
+class C(B):
+    def __init__(self, arg=0):
+        super(C, self).__init__(arg, arg=arg)
+[out]
+
+[case testSuperSilentInDynamicFunction]
+class A:
+    pass
+
+class B(A):
+    def foo(self):
+        super(B, self).foo() # Not an error
+[out]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
new file mode 100644
index 0000000..a506bce
--- /dev/null
+++ b/test-data/unit/check-tuples.test
@@ -0,0 +1,927 @@
+-- Normal assignment and subtyping
+-- -------------------------------
+
+
+[case testTupleAssignmentWithTupleTypes]
+from typing import Tuple
+t1 = None # type: Tuple[A]
+t2 = None # type: Tuple[B]
+t3 = None # type: Tuple[A, A]
+t4 = None # type: Tuple[A, B]
+t5 = None # type: Tuple[B, A]
+
+t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[B]", variable has type "Tuple[A]")
+t1 = t3 # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
+t3 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A]", variable has type "Tuple[A, A]")
+t3 = t4 # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[A, A]")
+t3 = t5 # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, A]")
+
+# Ok
+t1 = t1
+t2 = t2
+t3 = t3
+t4 = t4
+t5 = t5
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleSubtyping]
+from typing import Tuple
+t1 = None # type: Tuple[A, A]
+t2 = None # type: Tuple[A, B]
+t3 = None # type: Tuple[B, A]
+
+t2 = t1  # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+t2 = t3  # E: Incompatible types in assignment (expression has type "Tuple[B, A]", variable has type "Tuple[A, B]")
+t3 = t1  # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[B, A]")
+t3 = t2  # E: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "Tuple[B, A]")
+
+t1 = t2
+t1 = t3
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleCompatibilityWithOtherTypes]
+from typing import Tuple
+a, o = None, None # type: (A, object)
+t = None # type: Tuple[A, A]
+
+a = t # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "A")
+t = o # E: Incompatible types in assignment (expression has type "object", variable has type "Tuple[A, A]")
+t = a # E: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, A]")
+# TODO: callable types + tuples
+
+# Ok
+o = t
+t = None
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testNestedTupleTypes]
+from typing import Tuple
+t1 = None # type: Tuple[A, Tuple[A, A]]
+t2 = None # type: Tuple[B, Tuple[B, B]]
+
+t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
+t1 = t2
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testNestedTupleTypes2]
+from typing import Tuple
+t1 = None # type: Tuple[A, Tuple[A, A]]
+t2 = None # type: Tuple[B, Tuple[B, B]]
+
+t2 = t1 # E: Incompatible types in assignment (expression has type "Tuple[A, Tuple[A, A]]", variable has type "Tuple[B, Tuple[B, B]]")
+t1 = t2
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingWithNamedTupleType]
+from typing import Tuple
+t1 = None # type: Tuple[A, A]
+t2 = None # type: tuple
+
+t1 = t2 # E: Incompatible types in assignment (expression has type Tuple[Any, ...], variable has type "Tuple[A, A]")
+t2 = t1
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testTupleInitializationWithNone]
+from typing import Tuple
+t = None # type: Tuple[A, A]
+t = None
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Tuple expressions
+-- -----------------
+
+
+[case testTupleExpressions]
+from typing import Tuple
+t1 = None # type: tuple
+t2 = None # type: Tuple[A]
+t3 = None # type: Tuple[A, B]
+
+a, b, c = None, None, None # type: (A, B, C)
+
+t2 = ()        # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]")
+t2 = (a, a)    # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
+t3 = (a, a)    # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+t3 = (b, b)    # E: Incompatible types in assignment (expression has type "Tuple[B, B]", variable has type "Tuple[A, B]")
+t3 = (a, b, a) # E: Incompatible types in assignment (expression has type "Tuple[A, B, A]", variable has type "Tuple[A, B]")
+
+t1 = ()
+t1 = (a,)
+t2 = (a,)
+t3 = (a, b)
+t3 = (a, c)
+t3 = (None, None)
+
+class A: pass
+class B: pass
+class C(B): pass
+[builtins fixtures/tuple.pyi]
+
+[case testVoidValueInTuple]
+import typing
+(None, f()) # E: "f" does not return a value
+(f(), None) # E: "f" does not return a value
+
+def f() -> None: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Indexing
+-- --------
+
+
+[case testIndexingTuples]
+from typing import Tuple
+t1 = None # type: Tuple[A, B]
+t2 = None # type: Tuple[A]
+t3 = None # type: Tuple[A, B, C, D, E]
+a, b = None, None # type: (A, B)
+x = None # type: Tuple[A, B, C]
+y = None # type: Tuple[A, C, E]
+n = 0
+
+a = t1[1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = t1[0] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+t1[2]     # E: Tuple index out of range
+t1[3]     # E: Tuple index out of range
+t2[1]     # E: Tuple index out of range
+t1[n]     # E: Tuple index must be an integer literal
+t3[n:]    # E: Tuple slice must be an integer literal
+b = t1[(0)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = t1[0]
+b = t1[1]
+b = t1[-1]
+a = t1[(0)]
+x = t3[0:3] # type (A, B, C)
+y = t3[0:5:2] # type (A, C, E)
+x = t3[:-2] # type (A, B, C)
+
+class A: pass
+class B: pass
+class C: pass
+class D: pass
+class E: pass
+[builtins fixtures/tuple.pyi]
+
+[case testIndexingTuplesWithNegativeIntegers]
+from typing import Tuple
+t1 = None  # type: Tuple[A, B]
+t2 = None  # type: Tuple[A]
+a, b = None, None  # type: A, B
+
+a = t1[-1] # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = t1[-2] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+t1[-3]     # E: Tuple index out of range
+t1[-4]     # E: Tuple index out of range
+b = t2[(-1)] # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a = t1[-2]
+b = t1[-1]
+a = t2[(-1)]
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testAssigningToTupleItems]
+from typing import Tuple
+t = None # type: Tuple[A, B]
+n = 0
+
+t[0] = A() # E: Unsupported target for indexed assignment
+t[2] = A() # E: Unsupported target for indexed assignment
+t[n] = A() # E: Unsupported target for indexed assignment
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Multiple assignment
+-- -------------------
+
+
+[case testMultipleAssignmentWithTuples]
+from typing import Tuple
+t1 = None # type: Tuple[A, B]
+t2 = None # type: Tuple[A, B, A]
+a, b = None, None # type: (A, B)
+
+a, a = t1 # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b, b = t1 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a, b, b = t2 # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a, b = t1
+a, b, a = t2
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithInvalidNumberOfValues]
+from typing import Tuple
+t1 = None # type: Tuple[A, A, A]
+a = None # type: A
+
+a, a = t1       # E: Too many values to unpack (2 expected, 3 provided)
+a, a, a, a = t1 # E: Need more than 3 values to unpack (4 expected)
+
+a, a, a = t1
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithTupleExpressionRvalue]
+
+a, b = None, None # type: (A, B)
+
+a, b = a, a # Fail
+a, b = b, a # Fail
+
+a, b = a, b
+a, a = a, a
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A")
+main:5: error: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+[case testSubtypingInMultipleAssignment]
+
+a, b = None, None # type: (A, B)
+
+b, b = a, b # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b, b = b, a # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+a, b = b, b
+b, a = b, b
+
+class A: pass
+class B(A): pass
+[builtins fixtures/tuple.pyi]
+
+[case testInitializationWithMultipleValues]
+
+a, b = None, None # type: (A, B)
+
+a1, b1 = a, a # type: (A, B)  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a2, b2 = b, b # type: (A, B)  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a3, b3 = a # type: (A, B)     # E: '__main__.A' object is not iterable
+a4, b4 = None # type: (A, B)  # E: 'builtins.None' object is not iterable
+a5, b5 = a, b, a # type: (A, B)  # E: Too many values to unpack (2 expected, 3 provided)
+
+ax, bx = a, b # type: (A, B)
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithNonTupleRvalue]
+
+a, b = None, None # type: (A, B)
+def f(): pass
+
+a, b = None # E: 'builtins.None' object is not iterable
+a, b = a   # E: '__main__.A' object is not iterable
+a, b = f   # E: 'def () -> Any' object is not iterable
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentWithIndexedLvalues]
+
+a, b = None, None # type: (A, B)
+aa, bb = None, None # type: (AA, BB)
+
+a[a], b[b] = a, bb   # E: Incompatible types in assignment (expression has type "A", target has type "AA")
+a[a], b[b] = aa, b   # E: Incompatible types in assignment (expression has type "B", target has type "BB")
+a[aa], b[b] = aa, bb # E: Invalid index type "AA" for "A"; expected type "A"
+a[a], b[bb] = aa, bb # E: Invalid index type "BB" for "B"; expected type "B"
+a[a], b[b] = aa, bb
+
+class A:
+    def __setitem__(self, x: 'A', y: 'AA') -> None: pass
+class B:
+    def __setitem__(self, x: 'B', y: 'BB') -> None: pass
+
+class AA: pass
+class BB: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleDeclarationWithParentheses]
+
+(a, b) = (None, None) # type: int, str
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+a = 1
+b = ''
+
+[case testMultipleAssignmentWithExtraParentheses]
+
+a, b = None, None # type: (A, B)
+
+(a, b) = (a, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+(a, b) = (b, b) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+((a), (b)) = ((a), (a))  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+((a), (b)) = ((b), (b))  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+[a, b] = a, a  # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+[a, b] = b, b  # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+(a, b) = (a, b)
+((a), (b)) = ((a), (b))
+[a, b] = a, b
+
+class A: pass
+class B: pass
+[builtins fixtures/tuple.pyi]
+
+[case testMultipleAssignmentUsingSingleTupleType]
+from typing import Tuple
+a, b = None, None  # type: Tuple[int, str]
+a = 1
+b = ''
+a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testMultipleAssignmentWithMixedVariables]
+a = b, c = 1, 1
+x, y = p, q = 1, 1
+u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected)
+d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected)
+
+
+-- Assignment to starred expressions
+-- ---------------------------------
+
+
+[case testAssignmentToStarMissingAnnotation]
+from typing import List
+t = 1, 2
+a, b, *c = 1, 2  # E: Need type annotation for variable
+aa, bb, *cc = t  # E: Need type annotation for variable
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarAnnotation]
+from typing import List
+li, lo = None, None # type: List[int], List[object]
+a, b, *c = 1, 2  # type: int, int, List[int]
+c = lo  # E: Incompatible types in assignment (expression has type List[object], variable has type List[int])
+c = li
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarCount1]
+from typing import List
+ca = None # type: List[int]
+c = [1]
+a, b, *c = 1,  # E: Need more than 1 value to unpack (2 expected)
+a, b, *c = 1, 2
+a, b, *c = 1, 2, 3
+a, b, *c = 1, 2, 3, 4
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarCount2]
+from typing import List
+ca = None # type: List[int]
+t1 = 1,
+t2 = 1, 2
+t3 = 1, 2, 3
+t4 = 1, 2, 3, 4
+c = [1]
+a, b, *c = t1  # E: Need more than 1 value to unpack (2 expected)
+a, b, *c = t2
+a, b, *c = t3
+a, b, *c = t4
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarFromAny]
+from typing import Any, cast
+a, c = cast(Any, 1), C()
+p, *q = a
+c = a
+c = q
+
+class C: pass
+
+[case testAssignmentToComplexStar]
+from typing import List
+li = None # type: List[int]
+a, *(li) = 1,
+a, *(b, c) = 1, 2  # E: Need more than 1 value to unpack (2 expected)
+a, *(b, c) = 1, 2, 3
+a, *(b, c) = 1, 2, 3, 4  # E: Too many values to unpack (2 expected, 3 provided)
+[builtins fixtures/list.pyi]
+
+[case testAssignmentToStarFromTupleType]
+from typing import List, Tuple
+li = None # type: List[int]
+la = None # type: List[A]
+ta = None # type: Tuple[A, A, A]
+a, *la = ta
+a, *li = ta  # E
+a, *na = ta
+na = la
+na = a  # E
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:6: error: List item 0 has incompatible type "A"
+main:6: error: List item 1 has incompatible type "A"
+main:9: error: Incompatible types in assignment (expression has type "A", variable has type List[A])
+
+[case testAssignmentToStarFromTupleInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = A(), A()
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromListInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = [A(), A()]
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromTupleTypeInference]
+from typing import List, Tuple
+li = None # type: List[int]
+la = None # type: List[A]
+ta = None # type: Tuple[A, A, A]
+a, *l = ta
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAssignmentToStarFromListTypeInference]
+from typing import List
+li = None # type: List[int]
+la = None # type: List[A]
+a, *l = la
+l = li  # E: Incompatible types in assignment (expression has type List[int], variable has type List[A])
+l = la
+
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Nested tuple assignment
+-- -----------------------
+
+
+[case testNestedTupleAssignment1]
+
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+
+a1, (b1, c1) = a2, (b2, c2)
+a1, (a1, (b1, c1)) = a2, (a2, (b2, c2))
+a1, (a1, (a1, b1)) = a1, (a1, (a1, c1))  # Fail
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+main:7: error: Incompatible types in assignment (expression has type "C", variable has type "B")
+
+[case testNestedTupleAssignment2]
+
+a1, b1, c1 = None, None, None # type: (A, B, C)
+a2, b2, c2 = None, None, None # type: (A, B, C)
+t = a1, b1
+
+a2, b2 = t
+(a2, b2), c2 = t, c1
+(a2, c2), c2 = t, c1  # Fail
+t, c2 = (a2, b2), c2
+t, c2 = (a2, a2), c2  # Fail
+t = a1, a1, a1  # Fail
+t = a1  # Fail
+a2, a2, a2 = t  # Fail
+a2, = t  # Fail
+a2 = t  # Fail
+
+class A: pass
+class B: pass
+class C: pass
+[out]
+main:8: error: Incompatible types in assignment (expression has type "B", variable has type "C")
+main:10: error: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A, B]")
+main:11: error: Incompatible types in assignment (expression has type "Tuple[A, A, A]", variable has type "Tuple[A, B]")
+main:12: error: Incompatible types in assignment (expression has type "A", variable has type "Tuple[A, B]")
+main:13: error: Need more than 2 values to unpack (3 expected)
+main:14: error: Too many values to unpack (1 expected, 2 provided)
+main:15: error: Incompatible types in assignment (expression has type "Tuple[A, B]", variable has type "A")
+
+
+-- Error messages
+-- --------------
+
+
+[case testTupleErrorMessages]
+
+a = None # type: A
+
+(a, a) + a  # E: Unsupported left operand type for + ("Tuple[A, A]")
+a + (a, a)  # E: Unsupported operand types for + ("A" and "Tuple[A, A]")
+f((a, a))   # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A"
+(a, a).foo  # E: "Tuple[A, A]" has no attribute "foo"
+
+def f(x: 'A') -> None: pass
+
+class A:
+    def __add__(self, x: 'A') -> 'A': pass
+[builtins fixtures/tuple.pyi]
+
+[case testLargeTuplesInErrorMessages]
+
+a = None # type: LongTypeName
+a + (a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a, a) # Fail
+
+class LongTypeName:
+    def __add__(self, x: 'LongTypeName') -> 'LongTypeName': pass
+[builtins fixtures/tuple.pyi]
+[out]
+main:3: error: Unsupported operand types for + ("LongTypeName" and tuple(length 50))
+
+
+-- Tuple methods
+-- -------------
+
+
+[case testTupleMethods]
+from typing import Tuple
+t = None # type: Tuple[int, str]
+i = 0
+s = ''
+b = bool()
+
+s = t.__len__()  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+i = t.__str__()  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+i = s in t       # E: Incompatible types in assignment (expression has type "bool", variable has type "int")
+t.foo            # E: "Tuple[int, str]" has no attribute "foo"
+
+i = t.__len__()
+s = t.__str__()
+b = s in t
+
+[file builtins.py]
+from typing import TypeVar, Generic
+_T = TypeVar('_T')
+class object:
+    def __init__(self) -> None: pass
+class tuple(Generic[_T]):
+    def __len__(self) -> int: pass
+    def __str__(self) -> str: pass
+    def __contains__(self, o: object) -> bool: pass
+class int: pass
+class str: pass
+class bool: pass
+class type: pass
+class function: pass
+
+
+-- For loop over tuple
+-- -------------------
+
+
+[case testForLoopOverTuple]
+import typing
+t = 1, 2
+for x in t:
+    x = 1
+    x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverEmptyTuple]
+import typing
+t = ()
+for x in t: pass # E: Need type annotation for variable
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverNoneValuedTuple]
+import typing
+t = ()
+for x in None, None: pass # E: Need type annotation for variable
+[builtins fixtures/for.pyi]
+
+[case testForLoopOverTupleAndSubtyping]
+import typing
+class A: pass
+class B(A): pass
+for x in B(), A():
+    x = A()
+    x = B()
+    x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "A")
+[builtins fixtures/for.pyi]
+
+[case testTupleIterable]
+y = 'a'
+x = sum((1,2))
+y = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[builtins fixtures/tuple.pyi]
+
+
+-- Tuple as a base type
+-- --------------------
+
+
+[case testTupleBaseClass]
+import m
+[file m.pyi]
+from typing import Tuple
+class A(Tuple[int, str]):
+    def f(self, x: int) -> None:
+        a, b = 1, ''
+        a, b = self
+        b, a = self  # Error
+        self.f('')   # Error
+[builtins fixtures/tuple.pyi]
+[out]
+tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+tmp/m.pyi:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+tmp/m.pyi:7: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+
+[case testValidTupleBaseClass2]
+from typing import Tuple
+class A(Tuple[int, str]): pass
+
+x, y = A()
+reveal_type(x) # E: Revealed type is 'builtins.int'
+reveal_type(y) # E: Revealed type is 'builtins.str'
+
+x1 = A()[0] # type: int
+x2 = A()[1] # type: int # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+A()[2] # E: Tuple index out of range
+
+class B(Tuple[int, ...]): pass
+
+z1 = B()[0] # type: int
+z2 = B()[1] # type: str # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+B()[100]
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testValidTupleBaseClass]
+from typing import Tuple
+class A(tuple): pass
+[out]
+
+[case testTupleBaseClass2-skip]
+import m
+[file m.pyi]
+# This doesn't work correctly -- no errors are reported (#867)
+from typing import Tuple
+a = None # type: A
+class A(Tuple[int, str]): pass
+x, y = a
+x() # Expected: "int" not callable
+y() # Expected: "str" not callable
+[out]
+(should fail)
+
+[case testGenericClassWithTupleBaseClass]
+from typing import TypeVar, Generic, Tuple
+T = TypeVar('T')
+class Test(Generic[T], Tuple[T]): pass
+x = Test() # type: Test[int]
+[builtins fixtures/tuple.pyi]
+[out]
+main:4: error: Generic tuple types not supported
+
+
+-- Variable-length tuples (Tuple[t, ...] with literal '...')
+-- ---------------------------------------------------------
+
+
+[case testIndexingVariableLengthTuple]
+from typing import Tuple
+x = () # type: Tuple[str, ...]
+n = 5
+x[n]() # E: "str" not callable
+x[3]() # E: "str" not callable
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingVariableLengthTuple]
+from typing import Tuple
+class A: pass
+class B(A): pass
+def fa(t: Tuple[A, ...]) -> None: pass
+def fb(t: Tuple[B, ...]) -> None: pass
+ta = () # type: Tuple[A, ...]
+tb = () # type: Tuple[B, ...]
+fa(ta)
+fa(tb)
+fb(tb)
+fb(ta) # E: Argument 1 to "fb" has incompatible type Tuple[A, ...]; expected Tuple[B, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingFixedAndVariableLengthTuples]
+from typing import Tuple
+class A: pass
+class B(A): pass
+def fa(t: Tuple[A, ...]) -> None: pass
+def fb(t: Tuple[B, ...]) -> None: pass
+aa = (A(), A())
+ab = (A(), B())
+bb = (B(), B())
+fa(aa)
+fa(ab)
+fa(bb)
+fb(bb)
+fb(ab) # E: Argument 1 to "fb" has incompatible type "Tuple[A, B]"; expected Tuple[B, ...]
+fb(aa) # E: Argument 1 to "fb" has incompatible type "Tuple[A, A]"; expected Tuple[B, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testSubtypingTupleIsContainer]
+from typing import Container
+a = None  # type: Container[str]
+a = ()
+
+[case testSubtypingTupleIsSized]
+from typing import Sized
+a = None  # type: Sized
+a = ()
+
+[case testTupleWithStarExpr1]
+# flags: --fast-parser
+a = (1, 2)
+b = (*a, '')
+reveal_type(b)  # E: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]'
+
+[case testTupleWithStarExpr2]
+a = [1]
+b = (0, *a)
+reveal_type(b)  # E: Revealed type is 'builtins.tuple[builtins.int*]'
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr3]
+a = ['']
+b = (0, *a)
+reveal_type(b)  # E: Revealed type is 'builtins.tuple[builtins.object*]'
+c = (*a, '')
+reveal_type(c)  # E: Revealed type is 'builtins.tuple[builtins.str*]'
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr4]
+a = (1, 1, 'x', 'x')
+b = (1, 'x')
+a = (0, *b, '')
+[builtins fixtures/tuple.pyi]
+
+[case testTupleMeetTupleAny]
+from typing import Union, Tuple
+class A: pass
+class B: pass
+
+def f(x: Union[B, Tuple[A, A]]) -> None:
+    if isinstance(x, tuple):
+        reveal_type(x) # E: Revealed type is 'Tuple[__main__.A, __main__.A]'
+    else:
+        reveal_type(x) # E: Revealed type is '__main__.B'
+
+def g(x: Union[str, Tuple[str, str]]) -> None:
+    if isinstance(x, tuple):
+        reveal_type(x) # E: Revealed type is 'Tuple[builtins.str, builtins.str]'
+    else:
+        reveal_type(x) # E: Revealed type is 'builtins.str'
+
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleAnyComplex]
+from typing import Tuple, Union
+
+Pair = Tuple[int, int]
+Variant = Union[int, Pair]
+def tuplify(v: Variant) -> None:
+    reveal_type(v) # E: Revealed type is 'Union[builtins.int, Tuple[builtins.int, builtins.int]]'
+    if not isinstance(v, tuple):
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+        v = (v, v)
+        reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(v[0]) # E: Revealed type is 'builtins.int'
+
+Pair2 = Tuple[int, str]
+Variant2 = Union[int, Pair2]
+def tuplify2(v: Variant2) -> None:
+    if isinstance(v, tuple):
+        reveal_type(v) # E: Revealed type is 'Tuple[builtins.int, builtins.str]'
+    else:
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleAnyAfter]
+from typing import Tuple, Union
+
+def good(blah: Union[Tuple[int, int], int]) -> None:
+    reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
+    if isinstance(blah, tuple):
+        reveal_type(blah) # E: Revealed type is 'Tuple[builtins.int, builtins.int]'
+    reveal_type(blah) # E: Revealed type is 'Union[Tuple[builtins.int, builtins.int], builtins.int]'
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleMeetTupleVariable]
+from typing import Tuple, TypeVar, Generic, Union
+T = TypeVar('T')
+
+class A: pass
+class B1(A): pass
+class B2(A): pass
+class C: pass
+
+x = None # type: Tuple[A, ...]
+y = None # type: Tuple[Union[B1, C], Union[B2, C]]
+
+def g(x: T) -> Tuple[T, T]:
+    return (x, x)
+
+z = 1
+x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[B1, B2]"
+[builtins fixtures/tuple.pyi]
+[out]
+
+[case testTupleWithUndersizedContext]
+a = ([1], 'x')
+a = ([], 'x', 1)  # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]")
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithOversizedContext]
+a = (1, [1], 'x')
+a = (1, [])  # E: Incompatible types in assignment (expression has type "Tuple[int, List[int]]", variable has type "Tuple[int, List[int], str]")
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithoutContext]
+a = (1, [])  # E: Need type annotation for variable
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithUnionContext]
+from typing import List, Union, Tuple
+def f() -> Union[int, Tuple[List[str]]]:
+    return ([],)
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithVariableSizedTupleContext]
+from typing import List, Tuple
+def f() -> Tuple[List[str], ...]:
+    return ([],)
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithoutArgs]
+from typing import Tuple
+def f(a: Tuple) -> None: pass
+f(())
+f((1,))
+f(('', ''))
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected Tuple[Any, ...]
+[builtins fixtures/tuple.pyi]
+
+[case testTupleSingleton]
+# flags: --fast-parser
+from typing import Tuple
+def f(a: Tuple[()]) -> None: pass
+f(())
+f((1,))  # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]"
+f(('', ''))  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]"
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
new file mode 100644
index 0000000..20022cc
--- /dev/null
+++ b/test-data/unit/check-type-aliases.test
@@ -0,0 +1,74 @@
+[case testSimpleTypeAlias]
+import typing
+i = int
+def f(x: i) -> None: pass
+f(1)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+def f(x: U) -> None: pass
+f(1)
+f('')
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+
+[case testTupleTypeAlias]
+from typing import Tuple
+T = Tuple[int, str]
+def f(x: T) -> None: pass
+f((1, 'x'))
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]"
+
+[case testCallableTypeAlias]
+from typing import Callable
+A = Callable[[int], None]
+f = None  # type: A
+f(1)
+f('') # E: Argument 1 has incompatible type "str"; expected "int"
+
+[case testListTypeAlias]
+from typing import List
+A = List[int]
+def f(x: A) -> None: pass
+f([1])
+f(['x']) # E: List item 0 has incompatible type "str"
+[builtins fixtures/list.pyi]
+[out]
+
+[case testAnyTypeAlias]
+from typing import Any
+A = Any
+def f(x: A) -> None:
+    x.foo()
+f(1)
+f('x')
+
+[case testImportUnionAlias]
+import typing
+from _m import U
+def f(x: U) -> None: pass
+f(1)
+f('x')
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+[file _m.py]
+from typing import Union
+U = Union[int, str]
+[builtins fixtures/tuple.pyi]
+
+[case testTypeAliasInBuiltins]
+def f(x: bytes): pass
+bytes
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
+[builtins fixtures/alias.pyi]
+
+[case testEmptyTupleTypeAlias]
+from typing import Tuple, Callable
+EmptyTuple = Tuple[()]
+x = None # type: EmptyTuple
+reveal_type(x)  # E: Revealed type is 'Tuple[]'
+
+EmptyTupleCallable = Callable[[Tuple[()]], None]
+f = None # type: EmptyTupleCallable
+reveal_type(f)  # E: Revealed type is 'def (Tuple[])'
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-type-checks.test b/test-data/unit/check-type-checks.test
new file mode 100644
index 0000000..c4905a7
--- /dev/null
+++ b/test-data/unit/check-type-checks.test
@@ -0,0 +1,113 @@
+-- Conditional type checks.
+
+
+[case testSimpleIsinstance]
+
+x = None  # type: object
+n = None  # type: int
+s = None  # type: str
+n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+if isinstance(x, int):
+    n = x
+    s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+
+[case testSimpleIsinstance2]
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testSimpleIsinstance3]
+
+class A:
+    x = None  # type: object
+    n = None  # type: int
+    s = None  # type: str
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    else:
+        n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testMultipleIsinstanceTests]
+import typing
+class A: pass
+class B(A): pass
+def f(x: object, a: A, b: B, c: int) -> None:
+    if isinstance(x, A):
+        if isinstance(x, B):
+            b = x
+            x = a
+        a = x
+        c = x # E: Incompatible types in assignment (expression has type "A", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testMultipleIsinstanceTests2]
+import typing
+class A: pass
+class B(A): pass
+def f(x: object, y: object, n: int, s: str) -> None:
+    if isinstance(x, int):
+        if isinstance(y, str):
+            n = x
+            s = y
+            s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+            n = y # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+        s = y # E: Incompatible types in assignment (expression has type "object", variable has type "str")
+        n = y # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+        n = x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndElif]
+import typing
+def f(x: object, n: int, s: str) -> None:
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    elif isinstance(x, str):
+        s = x
+        n = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    else:
+        n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+        s = x # E: Incompatible types in assignment (expression has type "object", variable has type "str")
+    n = x # E: Incompatible types in assignment (expression has type "object", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndAnyType]
+from typing import Any
+def f(x: Any, n: int, s: str) -> None:
+    s = x
+    if isinstance(x, int):
+        n = x
+        s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+    s = x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C(Generic[T]):
+    def f(self, x: T) -> None: pass
+def f(x: object) -> None:
+    if isinstance(x, C):
+        x.f(1)
+        x.f('')
+        x.g() # E: C[Any] has no attribute "g"
+    x.g() # E: "object" has no attribute "g"
+[builtins fixtures/isinstance.pyi]
+[out]
diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test
new file mode 100644
index 0000000..0a39996
--- /dev/null
+++ b/test-data/unit/check-type-promotion.test
@@ -0,0 +1,39 @@
+-- Test cases for type promotion (e.g. int -> float).
+
+
+[case testPromoteIntToFloat]
+def f(x: float) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testCantPromoteFloatToInt]
+def f(x: int) -> None: pass
+f(1.1) # E: Argument 1 to "f" has incompatible type "float"; expected "int"
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteFloatToComplex]
+def f(x: complex) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteIntToComplex]
+def f(x: complex) -> None: pass
+f(1)
+[builtins fixtures/primitives.pyi]
+
+[case testPromoteBytearrayToByte]
+def f(x: bytes) -> None: pass
+f(bytearray())
+[builtins fixtures/primitives.pyi]
+
+[case testNarrowingDownFromPromoteTargetType]
+y = 0.0
+y = 1
+y() # E: "int" not callable
+[builtins fixtures/primitives.pyi]
+
+[case testNarrowingDownFromPromoteTargetType2]
+y = 0.0
+y = 1
+y.x # E: "int" has no attribute "x"
+[builtins fixtures/primitives.pyi]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
new file mode 100644
index 0000000..424c8b2
--- /dev/null
+++ b/test-data/unit/check-typeddict.test
@@ -0,0 +1,462 @@
+-- Create Instance
+
+[case testCanCreateTypedDictInstanceWithKeywordArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42, y=1337)
+reveal_type(p)  # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictInstanceWithDictCall]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(dict(x=42, y=1337))
+reveal_type(p)  # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictInstanceWithDictLiteral]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point({'x': 42, 'y': 1337})
+reveal_type(p)  # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testCanCreateTypedDictInstanceWithNoArguments]
+from mypy_extensions import TypedDict
+EmptyDict = TypedDict('EmptyDict', {})
+p = EmptyDict()
+reveal_type(p)  # E: Revealed type is 'TypedDict(_fallback=typing.Mapping[builtins.str, builtins.None])'
+[builtins fixtures/dict.pyi]
+
+
+-- Create Instance (Errors)
+
+[case testCannotCreateTypedDictInstanceWithUnknownArgumentPattern]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(42, 1337)  # E: Expected keyword arguments, {...}, or dict(...) in TypedDict constructor
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceNonLiteralItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+x = 'x'
+p = Point({x: 42, 'y': 1337})  # E: Expected TypedDict item name to be string literal
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithExtraItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42, y=1337, z=666)  # E: Expected items ['x', 'y'] but found ['x', 'y', 'z'].
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithMissingItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x=42)  # E: Expected items ['x', 'y'] but found ['x'].
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictInstanceWithIncompatibleItemType]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+p = Point(x='meaning_of_life', y=1337)  # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int")
+[builtins fixtures/dict.pyi]
+
+
+-- Subtyping
+
+[case testCanConvertTypedDictToItself]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+def identity(p: Point) -> Point:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToEquivalentTypedDict]
+from mypy_extensions import TypedDict
+PointA = TypedDict('PointA', {'x': int, 'y': int})
+PointB = TypedDict('PointB', {'x': int, 'y': int})
+def identity(p: PointA) -> PointB:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithNarrowerItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
+def convert(op: ObjectPoint) -> Point:
+    return op  # E: Incompatible return value type (got "ObjectPoint", expected "Point")
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithWiderItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+ObjectPoint = TypedDict('ObjectPoint', {'x': object, 'y': object})
+def convert(p: Point) -> ObjectPoint:
+    return p  # E: Incompatible return value type (got "Point", expected "ObjectPoint")
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToSimilarTypedDictWithIncompatibleItemTypes]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Chameleon = TypedDict('Chameleon', {'x': str, 'y': str})
+def convert(p: Point) -> Chameleon:
+    return p  # E: Incompatible return value type (got "Point", expected "Chameleon")
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToNarrowerTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Point1D = TypedDict('Point1D', {'x': int})
+def narrow(p: Point) -> Point1D:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToWiderTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
+def widen(p: Point) -> Point3D:
+    return p  # E: Incompatible return value type (got "Point", expected "Point3D")
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToCompatibleMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_mapping(p: Point) -> Mapping[str, int]:
+    return p
+[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToCompatibleMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_mapping(p: Point) -> Mapping[str, str]:
+    return p  # E: Incompatible return value type (got "Point", expected Mapping[str, str])
+[builtins fixtures/dict.pyi]
+
+-- TODO: Fix mypy stubs so that the following passes in the test suite
+--[case testCanConvertTypedDictToAnySuperclassOfMapping]
+--from mypy_extensions import TypedDict
+--from typing import Sized, Iterable, Container
+--Point = TypedDict('Point', {'x': int, 'y': int})
+--def as_sized(p: Point) -> Sized:
+--    return p
+--def as_iterable(p: Point) -> Iterable[str]:
+--    return p
+--def as_container(p: Point) -> Container[str]:
+--    return p
+--def as_object(p: Point) -> object:
+--    return p
+--[builtins fixtures/dict.pyi]
+
+[case testCannotConvertTypedDictToDictOrMutableMapping]
+from mypy_extensions import TypedDict
+from typing import Dict, MutableMapping
+Point = TypedDict('Point', {'x': int, 'y': int})
+def as_dict(p: Point) -> Dict[str, int]:
+    return p  # E: Incompatible return value type (got "Point", expected Dict[str, int])
+def as_mutable_mapping(p: Point) -> MutableMapping[str, int]:
+    return p  # E: Incompatible return value type (got "Point", expected MutableMapping[str, int])
+[builtins fixtures/dict.pyi]
+
+[case testCanConvertTypedDictToAny]
+from mypy_extensions import TypedDict
+from typing import Any
+Point = TypedDict('Point', {'x': int, 'y': int})
+def unprotect(p: Point) -> Any:
+    return p
+[builtins fixtures/dict.pyi]
+
+
+-- Join
+
+[case testJoinOfTypedDictHasOnlyCommonKeysAndNewFallback]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+Point3D = TypedDict('Point3D', {'x': int, 'y': int, 'z': int})
+p1 = TaggedPoint(type='2d', x=0, y=0)
+p2 = Point3D(x=1, y=1, z=1)
+joined_points = [p1, p2]
+reveal_type(p1)             # E: Revealed type is 'TypedDict(type=builtins.str, x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.object])'
+reveal_type(p2)             # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+reveal_type(joined_points)  # E: Revealed type is 'builtins.list[TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictRemovesNonequivalentKeys]
+from mypy_extensions import TypedDict
+CellWithInt = TypedDict('CellWithInt', {'value': object, 'meta': int})
+CellWithObject = TypedDict('CellWithObject', {'value': object, 'meta': object})
+c1 = CellWithInt(value=1, meta=42)
+c2 = CellWithObject(value=2, meta='turtle doves')
+joined_cells = [c1, c2]
+reveal_type(c1)             # E: Revealed type is 'TypedDict(value=builtins.int, meta=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+reveal_type(c2)             # E: Revealed type is 'TypedDict(value=builtins.int, meta=builtins.str, _fallback=typing.Mapping[builtins.str, builtins.object])'
+reveal_type(joined_cells)   # E: Revealed type is 'builtins.list[TypedDict(value=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfDisjointTypedDictsIsEmptyTypedDict]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+Cell = TypedDict('Cell', {'value': object})
+d1 = Point(x=0, y=0)
+d2 = Cell(value='pear tree')
+joined_dicts = [d1, d2]
+reveal_type(d1)             # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+reveal_type(d2)             # E: Revealed type is 'TypedDict(value=builtins.str, _fallback=typing.Mapping[builtins.str, builtins.str])'
+reveal_type(joined_dicts)   # E: Revealed type is 'builtins.list[TypedDict(_fallback=typing.Mapping[builtins.str, builtins.None])]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithCompatibleMappingIsMapping]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = {'score': 999}  # type: Mapping[str, int]
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[typing.Mapping*[builtins.str, builtins.int]]'
+[builtins fixtures/dict.pyi]
+
+-- TODO: Fix mypy stubs so that the following passes in the test suite
+--[case testJoinOfTypedDictWithCompatibleMappingSupertypeIsSupertype]
+--from mypy_extensions import TypedDict
+--from typing import Sized
+--Cell = TypedDict('Cell', {'value': int})
+--left = Cell(value=42)
+--right = {'score': 999}  # type: Sized
+--joined1 = [left, right]
+--joined2 = [right, left]
+--reveal_type(joined1)  # E: Revealed type is 'builtins.list[typing.Sized*]'
+--reveal_type(joined2)  # E: Revealed type is 'builtins.list[typing.Sized*]'
+--[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithIncompatibleMappingIsObject]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = {'score': 'zero'}  # type: Mapping[str, str]
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[builtins.object*]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[builtins.object*]'
+[builtins fixtures/dict.pyi]
+
+[case testJoinOfTypedDictWithIncompatibleTypeIsObject]
+from mypy_extensions import TypedDict
+from typing import Mapping
+Cell = TypedDict('Cell', {'value': int})
+left = Cell(value=42)
+right = 42
+joined1 = [left, right]
+joined2 = [right, left]
+reveal_type(joined1)  # E: Revealed type is 'builtins.list[builtins.object*]'
+reveal_type(joined2)  # E: Revealed type is 'builtins.list[builtins.object*]'
+[builtins fixtures/dict.pyi]
+
+
+-- Meet
+
+[case testMeetOfTypedDictsWithCompatibleCommonKeysHasAllKeysAndNewFallback]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XY = TypedDict('XY', {'x': int, 'y': int})
+YZ = TypedDict('YZ', {'y': int, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XY, y: YZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict(x=builtins.int, y=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+XYa = TypedDict('XYa', {'x': int, 'y': int})
+YbZ = TypedDict('YbZ', {'y': object, 'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: XYa, y: YbZ) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<uninhabited>'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback]
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable
+X = TypedDict('X', {'x': int})
+Z = TypedDict('Z', {'z': int})
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: Z) -> None: pass
+reveal_type(f(g))  # E: Revealed type is 'TypedDict(x=builtins.int, z=builtins.int, _fallback=typing.Mapping[builtins.str, builtins.int])'
+[builtins fixtures/dict.pyi]
+
+-- TODO: It would be more accurate for the meet to be TypedDict instead.
+[case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Mapping
+X = TypedDict('X', {'x': int})
+M = Mapping[str, int]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: M) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<uninhabited>'
+[builtins fixtures/dict.pyi]
+
+[case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Mapping
+X = TypedDict('X', {'x': int})
+M = Mapping[str, str]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: M) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<uninhabited>'
+[builtins fixtures/dict.pyi]
+
+-- TODO: It would be more accurate for the meet to be TypedDict instead.
+[case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow]
+# flags: --strict-optional
+from mypy_extensions import TypedDict
+from typing import TypeVar, Callable, Iterable
+X = TypedDict('X', {'x': int})
+I = Iterable[str]
+T = TypeVar('T')
+def f(x: Callable[[T, T], None]) -> T: pass
+def g(x: X, y: I) -> None: pass
+reveal_type(f(g))  # E: Revealed type is '<uninhabited>'
+[builtins fixtures/dict.pyi]
+
+
+-- Constraint Solver
+
+-- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path.
+
+
+-- Methods
+
+-- TODO: iter() doesn't accept TypedDictType as an argument type. Figure out why.
+--[case testCanCallMappingMethodsOnTypedDict]
+--from mypy_extensions import TypedDict
+--Cell = TypedDict('Cell', {'value': int})
+--c = Cell(value=42)
+--c['value']
+--iter(c)
+--len(c)
+--'value' in c
+--c.keys()
+--c.items()
+--c.values()
+--c.get('value')
+--c == c
+--c != c
+--[builtins fixtures/dict.pyi]
+
+
+-- Special Method: __getitem__
+
+[case testCanGetItemOfTypedDictWithValidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+reveal_type(p['type'])  # E: Revealed type is 'builtins.str'
+reveal_type(p['x'])     # E: Revealed type is 'builtins.int'
+reveal_type(p['y'])     # E: Revealed type is 'builtins.int'
+[builtins fixtures/dict.pyi]
+
+[case testCanGetItemOfTypedDictWithValidBytesOrUnicodeLiteralKey]
+# flags: --python-version 2.7
+from mypy_extensions import TypedDict
+Cell = TypedDict('Cell', {'value': int})
+c = Cell(value=42)
+reveal_type(c['value'])   # E: Revealed type is 'builtins.int'
+reveal_type(c[u'value'])  # E: Revealed type is 'builtins.int'
+[builtins_py2 fixtures/dict.pyi]
+
+[case testCannotGetItemOfTypedDictWithInvalidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['z']  # E: 'z' is not a valid item name; expected one of ['type', 'x', 'y']
+[builtins fixtures/dict.pyi]
+
+[case testCannotGetItemOfTypedDictWithNonLiteralKey]
+from mypy_extensions import TypedDict
+from typing import Union
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+def get_coordinate(p: TaggedPoint, key: str) -> Union[str, int]:
+    return p[key]  # E: Cannot prove expression is a valid item name; expected one of ['type', 'x', 'y']
+[builtins fixtures/dict.pyi]
+
+
+-- Special Method: __setitem__
+
+[case testCanSetItemOfTypedDictWithValidStringLiteralKeyAndCompatibleValueType]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['type'] = 'two_d'
+p['x'] = 1
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithIncompatibleValueType]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['x'] = 'y'  # E: Argument 2 has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithInvalidStringLiteralKey]
+from mypy_extensions import TypedDict
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+p['z'] = 1  # E: 'z' is not a valid item name; expected one of ['type', 'x', 'y']
+[builtins fixtures/dict.pyi]
+
+[case testCannotSetItemOfTypedDictWithNonLiteralKey]
+from mypy_extensions import TypedDict
+from typing import Union
+TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int})
+p = TaggedPoint(type='2d', x=42, y=1337)
+def set_coordinate(p: TaggedPoint, key: str, value: int) -> None:
+    p[key] = value  # E: Cannot prove expression is a valid item name; expected one of ['type', 'x', 'y']
+[builtins fixtures/dict.pyi]
+
+
+-- Special Method: get
+
+-- TODO: Implement support for these cases:
+--[case testGetOfTypedDictWithValidStringLiteralKeyReturnsPreciseType]
+--[case testGetOfTypedDictWithInvalidStringLiteralKeyIsError]
+--[case testGetOfTypedDictWithNonLiteralKeyReturnsImpreciseType]
+
+
+-- isinstance
+
+-- TODO: Implement support for this case.
+--[case testCannotIsInstanceTypedDictType]
+
+-- Scoping
+[case testTypedDictInClassNamespace]
+# https://github.com/python/mypy/pull/2553#issuecomment-266474341
+from mypy_extensions import TypedDict
+class C:
+    def f(self):
+        A = TypedDict('A', {'x': int})
+    def g(self):
+        A = TypedDict('A', {'y': int})
+C.A  # E: "C" has no attribute "A"
+[builtins fixtures/dict.pyi]
+
+[case testTypedDictInFunction]
+from mypy_extensions import TypedDict
+def f() -> None:
+    A = TypedDict('A', {'x': int})
+A  # E: Name 'A' is not defined
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test
new file mode 100644
index 0000000..5ae96f9
--- /dev/null
+++ b/test-data/unit/check-typevar-values.test
@@ -0,0 +1,505 @@
+-- Test cases for type variables with values restriction.
+
+
+[case testCallGenericFunctionWithTypeVarValueRestriction]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(1)
+f('x')
+f(object()) # E: Type argument 1 of "f" has incompatible value "object"
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext]
+from typing import TypeVar, List
+T = TypeVar('T', int, str)
+def f(x: T) -> List[T]: pass
+i = [1]
+s = ['x']
+o = [object()]
+i = f(1)
+s = f('')
+o = f(1) # E: Type argument 1 of "f" has incompatible value "object"
+[builtins fixtures/list.pyi]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs]
+from typing import TypeVar, Any, cast
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(cast(Any, object()))
+[out]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionInDynamicFunc]
+from typing import TypeVar, Any
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+def g():
+    f(object())
+[out]
+
+[case testCallGenericFunctionWithTypeVarValueRestrictionUsingSubtype]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+class S(str): pass
+f(S())
+[out]
+
+[case testCheckGenericFunctionBodyWithTypeVarValues]
+from typing import TypeVar
+class A:
+    def f(self, x: int) -> A: return self
+class B:
+    def f(self, x: int) -> B: return self
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    x = x.f(1)
+    return x.f(1)
+
+[case testCheckGenericFunctionBodyWithTypeVarValues2]
+from typing import TypeVar
+class A:
+    def f(self) -> A: return A()
+    def g(self) -> B: return B()
+class B:
+    def f(self) -> A: return A()
+    def g(self) -> B: return B()
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    return x.f() # Error
+def g(x: AB) -> AB:
+    return x.g() # Error
+[out]
+main:10: error: Incompatible return value type (got "A", expected "B")
+main:12: error: Incompatible return value type (got "B", expected "A")
+
+[case testTypeInferenceAndTypeVarValues]
+from typing import TypeVar
+class A:
+    def f(self) -> A: return self
+    def g(self) -> B: return B()
+class B:
+    def f(self) -> B: return self
+    def g(self) -> B: return B()
+AB = TypeVar('AB', A, B)
+def f(x: AB) -> AB:
+    y = x
+    if y:
+        return y.f()
+    else:
+        return y.g() # E: Incompatible return value type (got "B", expected "A")
+[out]
+
+[case testTypeDeclaredBasedOnTypeVarWithValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    a = None  # type: T
+    b = None # type: T
+    a = x
+    b = x
+    a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+    b = 1  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[out]
+
+[case testIsinstanceAndTypeVarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        return 2
+def g(x: T) -> T:
+    if isinstance(x, str):
+        return ''
+def h(x: T) -> T:
+    if isinstance(x, int):
+        return '' # E: Incompatible return value type (got "str", expected "int")
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues2]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        return 2
+    else:
+        return ''
+def g(x: T) -> T:
+    if isinstance(x, int):
+        return '' # E: Incompatible return value type (got "str", expected "int")
+    else:
+        return 2  # E: Incompatible return value type (got "int", expected "str")
+    return x
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues3]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = 1
+    else:
+        y = ''
+    return y
+[builtins fixtures/isinstance.pyi]
+
+[case testIsinstanceAndTypeVarValues4]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = 1
+    else:
+        y = object()
+    return y # E: Incompatible return value type (got "object", expected "str")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceAndTypeVarValues5]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T:
+    if isinstance(x, int):
+        y = object()
+    else:
+        y = ''
+    return y # E: Incompatible return value type (got "object", expected "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceWithUserDefinedTypeAndTypeVarValues]
+from typing import TypeVar
+class A: pass
+class B: pass
+T = TypeVar('T', A, B)
+def f(x: T) -> None:
+    y = x
+    if isinstance(x, A):
+        # This is only checked when x is A, since A and B are not considered overlapping.
+        x = y
+        x = A()
+    else:
+        x = B()
+        x = y
+        x.foo() # E: "B" has no attribute "foo"
+S = TypeVar('S', int, str)
+def g(x: S) -> None:
+    y = x
+    if isinstance(x, int):
+        x = y
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testIsinstanceWithUserDefinedTypeAndTypeVarValues2]
+from typing import TypeVar
+class S(str): pass
+T = TypeVar('T', S, int)
+def f(x: T) -> None:
+    y = x
+    if isinstance(x, S):
+        # This is checked only when the type of x is str.
+        x = y
+        x = S()
+        x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "S")
+    else:
+        x = y
+        x = 1
+        x = S() # E: Incompatible types in assignment (expression has type "S", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testTypeVarValuesAndNestedCalls]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(m: T) -> int: pass
+def h(x: int) -> int: pass
+def g(a: T) -> None:
+    h(f(a))
+[out]
+
+[case testGenericTypeWithTypevarValues]
+from typing import TypeVar, Generic, Any
+X = TypeVar('X', int, str)
+class A(Generic[X]): pass
+a = None  # type: A[int]
+b = None  # type: A[str]
+d = None  # type: A[object] # E: Type argument 1 of "A" has incompatible value "object"
+c = None  # type: A[Any]
+
+[case testConstructGenericTypeWithTypevarValuesAndTypeInference]
+from typing import TypeVar, Generic, Any, cast
+X = TypeVar('X', int, str)
+class A(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+A(1)
+A('x')
+A(cast(Any, object()))
+A(object()) # E: Type argument 1 of "A" has incompatible value "object"
+
+[case testGenericTypeWithTypevarValuesAndTypevarArgument]
+from typing import TypeVar, Generic
+class C: pass
+X = TypeVar('X', int, str)
+Y = TypeVar('Y', int, C)
+Z = TypeVar('Z')
+class D(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+def f(x: X) -> None:
+    a = None  # type: D[X]
+def g(x: Y) -> None:
+    a = None  # type: D[Y]
+def h(x: Z) -> None:
+    a = None  # type: D[Z]
+[out]
+main:11: error: Invalid type argument value for "D"
+main:13: error: Type variable "Z" not valid as type argument value for "D"
+
+[case testGenericTypeWithTypevarValuesAndSubtypePromotion]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class S(str): pass
+class C(Generic[X]):
+    def __init__(self, x: X) -> None: pass
+x = None  # type: C[str]
+y = C(S())
+x = y
+y = x
+c_int = C(1) # type: C[int]
+y = c_int # E: Incompatible types in assignment (expression has type C[int], variable has type C[str])
+
+[case testGenericTypeBodyWithTypevarValues]
+from typing import TypeVar, Generic
+class A:
+    def f(self, x: int) -> None: pass
+    def g(self, x: int) -> None: pass
+    def h(self, x: str) -> None: pass
+class B:
+    def f(self, x: int) -> None: pass
+    def g(self, x: str) -> None: pass
+    def h(self, x: int) -> None: pass
+X = TypeVar('X', A, B)
+class C(Generic[X]):
+    def f(self, x: X) -> None:
+        x.f(1)
+        x.g(1) # E: Argument 1 to "g" of "B" has incompatible type "int"; expected "str"
+        x.h(1) # E: Argument 1 to "h" of "A" has incompatible type "int"; expected "str"
+[out]
+
+[case testAttributeInGenericTypeWithTypevarValues1]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = None  # type: X
+    def f(self, x: X) -> None:
+        self.x = x
+        self.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+[out]
+
+[case testAttributeInGenericTypeWithTypevarValues2]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = None  # type: X
+cn = C() # type: C[int]
+cn.x = 1
+cn.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+cs = C() # type: C[str]
+cs.x = ''
+cs.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testInferredAttributeInGenericClassBodyWithTypevarValues]
+from typing import TypeVar, Generic
+X = TypeVar('X', int, str)
+class C(Generic[X]):
+    x = 1
+C.x = 1
+C.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testMultipleClassTypevarsWithValues1]
+from typing import TypeVar, Generic
+class A:
+    def f(self, x: int) -> None: pass
+class B:
+    def f(self, x: str) -> None: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+class C(Generic[X, Y]):
+    def f(self, x: X, y: Y) -> None:
+        x.f(y)
+[out]
+main:10: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int"
+main:10: error: Argument 1 to "f" of "B" has incompatible type "int"; expected "str"
+
+[case testMultipleClassTypevarsWithValues2]
+from typing import TypeVar, Generic
+class A: pass
+class B: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+class C(Generic[X, Y]): pass
+a = None  # type: C[A, int]
+b = None  # type: C[B, str]
+c = None  # type: C[int, int] # E: Type argument 1 of "C" has incompatible value "int"
+d = None  # type: C[A, A]     # E: Type argument 2 of "C" has incompatible value "A"
+
+[case testCallGenericFunctionUsingMultipleTypevarsWithValues]
+from typing import TypeVar
+class A: pass
+class B: pass
+X = TypeVar('X', A, B)
+Y = TypeVar('Y', int, str)
+def f(x: X, y: Y) -> None: pass
+f(A(), '')
+f(B(), 1)
+f(A(), A()) # E: Type argument 2 of "f" has incompatible value "A"
+f(1, 1) # E: Type argument 1 of "f" has incompatible value "int"
+
+[case testGenericFunctionWithNormalAndRestrictedTypevar]
+from typing import TypeVar, Generic
+X = TypeVar('X')
+Y = TypeVar('Y', int, str)
+class C(Generic[Y]):
+    def __init__(self, y: Y) -> None: pass
+def f(x: X, y: Y, z: int) -> None:
+    C(y)
+    C(x)  # Error
+    z = x # Error
+    z = y # Error
+    y.foo # Error
+[out]
+main:8: error: Type argument 1 of "C" has incompatible value "X"
+main:9: error: Incompatible types in assignment (expression has type "X", variable has type "int")
+main:10: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:11: error: "int" has no attribute "foo"
+main:11: error: "str" has no attribute "foo"
+
+[case testTypeVarWithValueInferredFromObjectReturnTypeContext]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def c1(x: object) -> None: pass
+def c2(x: int) -> None: pass
+def c3(x: str) -> None: pass
+def g(x: T) -> T: pass
+c1(g(''))
+c2(g(1))
+c3(g(''))
+c2(g(''))  # E: Argument 1 to "c2" has incompatible type "str"; expected "int"
+c3(g(1))   # E: Argument 1 to "c3" has incompatible type "int"; expected "str"
+
+[case testTypeVarWithValueInferredFromObjectReturnTypeContext2]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+class ss(str): pass
+def c(x: ss) -> None: pass
+def g(x: T) -> T: pass
+c(g(''))
+c(g(1))
+[out]
+main:6: error: Argument 1 to "c" has incompatible type "str"; expected "ss"
+main:7: error: Argument 1 to "c" has incompatible type "int"; expected "ss"
+
+
+-- Special cases
+-- -------------
+
+
+[case testTypevarValuesSpecialCase1]
+from typing import TypeVar, Generic
+from abc import abstractmethod
+T = TypeVar('T', int, str)
+class A(Generic[T]):
+    @abstractmethod
+    def f(self) -> 'A[T]': pass
+class B(A[str]):
+    @abstractmethod
+    def f(self) -> 'B': pass
+class C(A[str]):
+    @abstractmethod
+    def f(self) -> int: # E: Return type of "f" incompatible with supertype "A"
+        pass
+[out]
+
+[case testDefaultArgumentValueInGenericClassWithTypevarValues]
+from typing import TypeVar, Generic
+T = TypeVar('T', int, str)
+class C(Generic[T]):
+    def f(self, x: int = None) -> None: pass
+
+[case testTypevarValuesWithOverloadedFunctionSpecialCase]
+from typing import TypeVar, overload, Callable
+
+T = TypeVar('T', int, str)
+def f(x: T) -> None:
+     y = m(g, x)
+     x = y
+     y = object()
+
+A = TypeVar('A')
+R = TypeVar('R')
+def m(f: Callable[[A], R], it: A) -> A: pass
+
+ at overload
+def g(x: int) -> int: return x
+ at overload
+def g(x: str) -> str: return x
+[out]
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "int")
+main:7: error: Incompatible types in assignment (expression has type "object", variable has type "str")
+
+[case testGenericFunctionSubtypingWithTypevarValues]
+from typing import TypeVar
+class A: pass
+T = TypeVar('T', int, str)
+U = TypeVar('U', str, A, int)
+def f(x: T) -> T: pass
+def g(x: U) -> U: pass
+a = f
+a = f
+a = g
+b = g
+b = g
+b = f # E: Incompatible types in assignment (expression has type Callable[[T], T], variable has type Callable[[U], U])
+
+[case testInnerFunctionWithTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+U = TypeVar('U', int, str)
+def outer(x: T) -> T:
+    def inner(y: T) -> T:
+        return x
+    def inner2(y: U) -> U:
+        return y
+    inner(x)
+    inner(3) # E: Argument 1 to "inner" has incompatible type "int"; expected "str"
+    inner2(x)
+    inner2(3)
+    outer(3)
+    return x
+[out]
+
+[case testInnerFunctionMutualRecursionWithTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def outer(x: T) -> T:
+    def inner1(y: T) -> T:
+        return inner2(y)
+    def inner2(y: T) -> T:
+        return inner1('a') # E: Argument 1 to "inner1" has incompatible type "str"; expected "int"
+    return inner1(x)
+[out]
+
+[case testClassMemberTypeVarInFunctionBody]
+from typing import TypeVar
+class C:
+    T = TypeVar('T', int)
+    def f(self, x: T) -> T:
+        A = C.T
+        return x
+
+[case testParameterLessGenericAsRestriction]
+from typing import Sequence, Iterable, TypeVar
+S = TypeVar('S', Sequence, Iterable)
+def my_len(s: S) -> None: pass
+def crash() -> None: my_len((0,))
diff --git a/test-data/unit/check-underscores.test b/test-data/unit/check-underscores.test
new file mode 100644
index 0000000..a1d88cb
--- /dev/null
+++ b/test-data/unit/check-underscores.test
@@ -0,0 +1,16 @@
+[case testUnderscoresRequire36]
+# flags: --fast-parser --python-version 3.5
+x = 1000_000  # E: Underscores in numeric literals are only supported in Python 3.6
+[out]
+
+[case testUnderscoresSyntaxError]
+# flags: --fast-parser --python-version 3.6
+x = 1000_000_  # E: invalid token
+[out]
+
+[case testUnderscoresBasics]
+# flags: --fast-parser --python-version 3.6
+x: int
+x = 1000_000
+x = 0x_FF_FF_FF_FF
+y: str = 1000_000.000_001  # E: Incompatible types in assignment (expression has type "float", variable has type "str")
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
new file mode 100644
index 0000000..c51cb06
--- /dev/null
+++ b/test-data/unit/check-unions.test
@@ -0,0 +1,219 @@
+-- Type checking of union types
+
+[case testUnion1]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    elif isinstance(x, str):
+        z = 'a'
+        z = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnion2]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    else:
+        z = 'a'
+        z = x
+[builtins fixtures/isinstance.pyi]
+
+[case testUnion3]
+from typing import Union
+def f(x: Union[int, str]) -> None:
+    if isinstance(x, int):
+        y = 1
+        y = x
+    else:
+        z = 2
+        z = x # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testUnionAnyIsInstance]
+from typing import Any, Union
+
+def func(v:Union[int, Any]) -> None:
+    if isinstance(v, int):
+        reveal_type(v) # E: Revealed type is 'builtins.int'
+    else:
+        reveal_type(v) # E: Revealed type is 'Any'
+[builtins fixtures/isinstance.pyi]
+[out]
+
+[case testUnionAttributeAccess]
+from typing import Union
+
+class A: y = 1
+class B: y = 2
+class C: pass
+
+w = None # type: Union[A, B]
+x = None # type: Union[A, C]
+y = None # type: int
+z = None # type: str
+
+y = w.y
+z = w.y       # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+w.y = 'a'     # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+y = x.y       # E: Some element of union has no attribute "y"
+z = x.y       # E: Some element of union has no attribute "y"
+
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionMethodCalls]
+from typing import Union
+
+class A:
+    def foo(self) -> int: pass
+class B:
+    def foo(self) -> int: pass
+class C:
+    def foo(self) -> str: pass
+
+x = None # type: Union[A, B]
+y = None # type: Union[A, C]
+i = None # type: int
+
+x.foo()
+y.foo()
+i = x.foo()
+i = y.foo()   # E: Incompatible types in assignment (expression has type "Union[int, str]", variable has type "int")
+
+[builtins fixtures/isinstance.pyi]
+
+[case testUnionIndexing]
+from typing import Union, List
+x = None # type: Union[List[int], str]
+x[2]
+x[2] + 1 # E: Unsupported operand types for + (likely involving Union)
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnionAsOverloadArg]
+from typing import Union, overload
+ at overload
+def f(x: Union[int, str]) -> int: pass
+ at overload
+def f(x: type) -> str: pass
+x = 0
+x = f(1)
+x = f('')
+s = ''
+s = f(int)
+s = f(1)    # E: Incompatible types in assignment (expression has type "int", variable has type "str")
+x = f(int)  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testUnionWithNoneItem]
+from typing import Union
+def f() -> Union[int, None]: pass
+x = 1
+x = f()
+
+[case testOptional]
+from typing import Optional
+def f(x: Optional[int]) -> None: pass
+f(1)
+f(None)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+
+[case testUnionSimplificationGenericFunction]
+from typing import TypeVar, Union, List
+T = TypeVar('T')
+def f(x: List[T]) -> Union[T, int]: pass
+def g(y: str) -> None: pass
+a = f([1])
+g(a) # E: Argument 1 to "g" has incompatible type "int"; expected "str"
+[builtins fixtures/list.pyi]
+
+[case testUnionSimplificationGenericClass]
+from typing import TypeVar, Union, Generic
+T = TypeVar('T')
+U = TypeVar('U')
+class C(Generic[T, U]):
+    def f(self, x: str) -> Union[T, U]: pass
+a = C() # type: C[int, int]
+b = a.f('a')
+a.f(b) # E: Argument 1 to "f" of "C" has incompatible type "int"; expected "str"
+
+[case testUnionOrderEquivalence]
+from typing import Union
+
+def foo(): pass
+
+S = str
+T = int
+
+if foo():
+    def f(x: Union[int, str]) -> None: pass
+elif foo():
+    def f(x: Union[str, int]) -> None: pass
+elif foo():
+    def f(x: Union[int, str, int, int, str]) -> None: pass
+elif foo():
+    def f(x: Union[int, str, float]) -> None: pass  # E: All conditional function variants must have identical signatures
+elif foo():
+    def f(x: Union[S, T]) -> None: pass
+elif foo():
+    def f(x: Union[str]) -> None: pass  # E: All conditional function variants must have identical signatures
+else:
+    def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass
+
+# Checks bidirectionality of testing. The first definition of g is consistent with
+# the second, but not vice-versa.
+if foo():
+    def g(x: Union[int, str, bytes]) -> None: pass
+else:
+    def g(x: Union[int, str]) -> None: pass  # E: All conditional function variants must have identical signatures
+
+[case testUnionSimplificationSpecialCases]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+a = None # type: Any
+
+reveal_type(u(C(), None))  # E: Revealed type is '__main__.C*'
+reveal_type(u(None, C()))  # E: Revealed type is '__main__.C*'
+
+# This will be fixed later
+reveal_type(u(C(), a))  # E: Revealed type is 'Any'
+reveal_type(u(a, C()))  # E: Revealed type is 'Any'
+
+reveal_type(u(C(), C()))  # E: Revealed type is '__main__.C*'
+reveal_type(u(a, a))  # E: Revealed type is 'Any'
+
+[case testUnionSimplificationSpecialCase2]
+from typing import Any, TypeVar, Union
+
+class C(Any): pass
+
+T = TypeVar('T')
+S = TypeVar('S')
+def u(x: T, y: S) -> Union[S, T]: pass
+
+def f(x: T) -> None:
+    reveal_type(u(C(), x)) # E: Revealed type is 'Union[T`-1, __main__.C*]'
+    reveal_type(u(x, C())) # E: Revealed type is 'Union[__main__.C*, T`-1]'
+
+[case testUnionSimplificationSpecialCase3]
+from typing import Any, TypeVar, Generic, Union
+
+class C(Any): pass
+
+V = TypeVar('V')
+T = TypeVar('T')
+
+class M(Generic[V]):
+    def get(self, default: T) -> Union[V, T]: ...
+
+def f(x: M[C]) -> None:
+    y = x.get(None)
+    reveal_type(y) # E: Revealed type is '__main__.C'
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
new file mode 100644
index 0000000..a18a42b
--- /dev/null
+++ b/test-data/unit/check-unreachable-code.test
@@ -0,0 +1,459 @@
+-- Type checker test cases for conditional checks that result in some
+-- blocks classified as unreachable (they are not type checked or semantically
+-- analyzed).
+--
+-- For example, we skip blocks that will not be executed on the active
+-- Python version.
+
+[case testConditionalTypeAliasPY3]
+import typing
+def f(): pass
+PY3 = f()
+if PY3:
+    t = int
+    x = object() + 'x' # E: Unsupported left operand type for + ("object")
+else:
+    t = str
+    y = 'x' / 1
+x
+z = 1 # type: t
+
+[case testConditionalTypeAliasPY3_python2]
+import typing
+def f(): pass
+PY3 = f()
+if PY3:
+    t = int
+    x = object() + 'x'
+else:
+    t = str
+    y = 'x' / 1 # E: "str" has no attribute "__div__"
+y
+z = '' # type: t
+
+[case testConditionalAssignmentPY2]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    x = object() + 'x'
+else:
+    y = 'x' / 1 # E: Unsupported left operand type for / ("str")
+y
+
+[case testConditionalAssignmentPY2_python2]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    x = object() + 'x' # E: Unsupported left operand type for + ("object")
+else:
+    y = 'x' / 1
+x
+
+[case testConditionalImport]
+import typing
+def f(): pass
+PY2 = f()
+if PY2:
+    import fuzzybar
+    from barbar import *
+    from pawwaw import a, bc
+else:
+    import m
+[file m.py]
+import typing
+x = 1
+x = 'a'
+[out]
+tmp/m.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testNegatedMypyConditional]
+import typing
+MYPY = 0
+if not MYPY:
+    import xyz753
+else:
+    import pow123 # E
+[builtins fixtures/bool.pyi]
+[out]
+main:6: error: Cannot find module named 'pow123'
+main:6: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMypyConditional]
+import typing
+MYPY = 0
+if MYPY:
+    None + 1 # E: Unsupported left operand type for + (None)
+else:
+    None + ''
+[builtins fixtures/bool.pyi]
+
+[case testTypeCheckingConditional]
+import typing
+if typing.TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[out]
+main:3: error: Cannot find module named 'pow123'
+main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testTypeCheckingConditionalFromImport]
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[out]
+main:3: error: Cannot find module named 'pow123'
+main:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testNegatedTypeCheckingConditional]
+import typing
+if not typing.TYPE_CHECKING:
+    import pow123 # E
+else:
+    import xyz753
+[builtins fixtures/bool.pyi]
+[out]
+main:5: error: Cannot find module named 'xyz753'
+main:5: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testUndefinedTypeCheckingConditional]
+if not TYPE_CHECKING: # E
+    import pow123
+else:
+    import xyz753
+[builtins fixtures/bool.pyi]
+[out]
+main:1: error: Name 'TYPE_CHECKING' is not defined
+main:4: error: Cannot find module named 'xyz753'
+main:4: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testConditionalClassDefPY3]
+def f(): pass
+PY3 = f()
+if PY3:
+    pass
+else:
+    class X(object):
+        pass
+
+[case testUnreachabilityAndElifPY3]
+def f(): pass
+PY3 = f()
+if PY3:
+    pass
+elif bool():
+    import nonexistent
+    1 + ''
+else:
+    import bad_name
+    1 + ''
+[builtins fixtures/bool.pyi]
+[out]
+
+[case testSysVersionInfo_python2]
+import sys
+if sys.version_info[0] >= 3:
+    def foo():
+        # type: () -> int
+        return 0
+else:
+    def foo():
+        # type: () -> str
+        return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.str'
+[builtins_py2 fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfo]
+import sys
+if sys.version_info[0] >= 3:
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoNegated_python2]
+import sys
+if not (sys.version_info[0] < 3):
+    def foo():
+        # type: () -> int
+        return 0
+else:
+    def foo():
+        # type: () -> str
+        return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.str'
+[builtins_py2 fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoNegated]
+import sys
+if not (sys.version_info[0] < 3):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+reveal_type(foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced1]
+import sys
+if sys.version_info[:1] >= (3,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced2]
+import sys
+if sys.version_info[:2] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced3]
+import sys
+if sys.version_info[:] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced4]
+import sys
+if sys.version_info[0:2] >= (3, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced5]
+import sys
+if sys.version_info[0:] >= (3,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced6]
+import sys
+if sys.version_info[1:] >= (5,):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced7]
+import sys
+if sys.version_info >= (3, 5):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced8]
+# Our pyversion only has (major, minor),
+# so testing for (major, minor, bugfix) is unsupported.
+import sys
+if sys.version_info >= (3, 5, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''  # E: All conditional function variants must have identical signatures
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoSliced9]
+# Our pyversion only has (major, minor),
+# so testing for (minor, bugfix) is unsupported (also it's silly :-).
+import sys
+if sys.version_info[1:] >= (5, 0):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''  # E: All conditional function variants must have identical signatures
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatform1]
+import sys
+if sys.platform == 'fictional':
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + ''
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatform2]
+import sys
+if sys.platform != 'fictional':
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformNegated]
+import sys
+if not (sys.platform == 'fictional'):
+    def foo() -> int: return 0
+else:
+    def foo() -> str: return ''
+foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoClass]
+import sys
+if sys.version_info < (3, 5):
+    class C:
+        pass
+else:
+    class C:
+        def foo(self) -> int: return 0
+C().foo() + 0
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoImport]
+import sys
+if sys.version_info >= (3, 5):
+    import collections
+else:
+    collections = None
+Pt = collections.namedtuple('Pt', 'x y z')
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoVariable]
+import sys
+if sys.version_info >= (3, 5):
+    x = ''
+else:
+    x = 0
+x + ''
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoInClass]
+import sys
+class C:
+    if sys.version_info >= (3, 5):
+        def foo(self) -> int: return 0
+    else:
+        def foo(self) -> str: return ''
+reveal_type(C().foo())  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysVersionInfoInFunction]
+import sys
+def foo() -> None:
+    if sys.version_info >= (3, 5):
+        x = ''
+    else:
+        x = 0
+    reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformInMethod]
+import sys
+class C:
+    def foo(self) -> None:
+        if sys.platform != 'fictional':
+            x = ''
+        else:
+            x = 0
+        reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testSysPlatformInFunctionImport]
+import sys
+def foo() -> None:
+    if sys.platform != 'fictional':
+        import a
+    else:
+        import b as a
+    a.x
+[file a.py]
+x = 1
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysVersionInfo]
+# flags: --python-version 3.2
+import sys
+if sys.version_info == (3, 2):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysVersionInfo2]
+# flags: --python-version 3.1
+import sys
+if sys.version_info == (3, 2):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatform]
+# flags: --platform linux
+import sys
+if sys.platform == 'linux':
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatform2]
+# flags: --platform win32
+import sys
+if sys.platform == 'linux':
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.int'
+[builtins fixtures/ops.pyi]
+[out]
+
+[case testCustomSysPlatformStartsWith]
+# flags: --platform win32
+import sys
+if sys.platform.startswith('win'):
+    x = "foo"
+else:
+    x = 3
+reveal_type(x)  # E: Revealed type is 'builtins.str'
+[builtins fixtures/ops.pyi]
+[out]
diff --git a/test-data/unit/check-unsupported.test b/test-data/unit/check-unsupported.test
new file mode 100644
index 0000000..7f36e69
--- /dev/null
+++ b/test-data/unit/check-unsupported.test
@@ -0,0 +1,15 @@
+-- Tests for unsupported features
+
+
+[case testDecorateOverloadedFunction]
+# The error messages are not the most informative ever.
+def d(x): pass
+ at d
+def f(): pass
+def f(x): pass # E
+def g(): pass
+ at d # E
+def g(x): pass
+[out]
+main:5: error: Name 'f' already defined
+main:7: error: Name 'g' already defined
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
new file mode 100644
index 0000000..cad1dad
--- /dev/null
+++ b/test-data/unit/check-varargs.test
@@ -0,0 +1,592 @@
+-- Test cases for the type checker related to varargs.
+
+
+-- Varargs within body
+-- -------------------
+
+
+[case testVarArgsWithinFunction]
+from typing import Tuple
+def f( *b: 'B') -> None:
+    ab = None # type: Tuple[B, ...]
+    ac = None # type: Tuple[C, ...]
+    b = ac # E: Incompatible types in assignment (expression has type Tuple[C, ...], variable has type Tuple[B, ...])
+    ac = b # E: Incompatible types in assignment (expression has type Tuple[B, ...], variable has type Tuple[C, ...])
+    b = ab
+    ab = b
+
+class B: pass
+class C: pass
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+[case testVarArgsAreTuple]
+from typing import Tuple, Sequence
+def want_tuple(types: Tuple[type, ...]): pass
+def want_sequence(types: Sequence[type]): pass
+def test(*t: type) -> None:
+    want_tuple(t)
+    want_sequence(t)
+[builtins fixtures/tuple.pyi]
+[out]
+
+
+-- Calling varargs function
+-- ------------------------
+
+
+[case testCallingVarArgsFunction]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(c)       # E: Argument 1 to "f" has incompatible type "C"; expected "A"
+f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A"
+f(g())     # E: "g" does not return a value
+f(a, g())  # E: "g" does not return a value
+f()
+f(a)
+f(b)
+f(a, b, a, b)
+
+def f( *a: 'A') -> None: pass
+
+def g() -> None: pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingVarArgsFunctionWithAlsoNormalArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(a)       # E: Argument 1 to "f" has incompatible type "A"; expected "C"
+f(c, c)    # E: Argument 2 to "f" has incompatible type "C"; expected "A"
+f(c, a, b, c)  # E: Argument 4 to "f" has incompatible type "C"; expected "A"
+f(c)
+f(c, a)
+f(c, b, b, a, b)
+
+def f(a: 'C', *b: 'A') -> None: pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallingVarArgsFunctionWithDefaultArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+
+f(a)           # E: Argument 1 to "f" has incompatible type "A"; expected "C"
+f(c, c)        # E: Argument 2 to "f" has incompatible type "C"; expected "A"
+f(c, a, b, c)  # E: Argument 4 to "f" has incompatible type "C"; expected "A"
+f()
+f(c)
+f(c, a)
+f(c, b, b, a, b)
+
+def f(a: 'C' = None, *b: 'A') -> None:
+    pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testCallVarargsFunctionWithIterable]
+from typing import Iterable
+it1 = None  # type: Iterable[int]
+it2 = None  # type: Iterable[str]
+def f(*x: int) -> None: pass
+f(*it1)
+f(*it2) # E: Argument 1 to "f" has incompatible type *Iterable[str]; expected "int"
+[builtins fixtures/for.pyi]
+
+[case testCallVarargsFunctionWithIterableAndPositional]
+# flags: --fast-parser
+from typing import Iterable
+it1 = None  # type: Iterable[int]
+def f(*x: int) -> None: pass
+f(*it1, 1, 2)
+f(*it1, 1, *it1, 2)
+f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/for.pyi]
+
+[case testCallVarargsFunctionWithTupleAndPositional]
+# flags: --fast-parser
+def f(*x: int) -> None: pass
+it1 = (1, 2)
+f(*it1, 1, 2)
+f(*it1, 1, *it1, 2)
+f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/for.pyi]
+
+
+-- Calling varargs function + type inference
+-- -----------------------------------------
+
+
+[case testTypeInferenceWithCalleeVarArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+b = None # type: B
+c = None # type: C
+o = None # type: object
+
+a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+b = f(b, a) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+
+o = f()
+a = f(a)
+a = f(b)
+a = f(a, b, a)
+o = f(a, b, o)
+c = f(c)
+
+def f( *a: T) -> T:
+    pass
+
+class A: pass
+class B(A): pass
+class C: pass
+[builtins fixtures/list.pyi]
+
+[case testTypeInferenceWithCalleeVarArgsAndDefaultArgs]
+from typing import TypeVar
+T = TypeVar('T')
+a = None # type: A
+o = None # type: object
+
+a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+a = f(a, a, a, o) # E: Incompatible types in assignment (expression has type "object", variable has type "A")
+
+a = f(a)
+a = f(a, a)
+a = f(a, a, a)
+
+def f(a: T, b: T = None, *c: T) -> T:
+    pass
+
+class A: pass
+[builtins fixtures/list.pyi]
+
+
+-- Calling normal function with varargs
+-- ------------------------------------
+
+
+[case testCallingWithListVarArgs]
+from typing import List, Any, cast
+aa = None # type: List[A]
+ab = None # type: List[B]
+a = None # type: A
+b = None # type: B
+
+f(*aa)    # Fail
+f(a, *ab) # Ok
+f(a, b)
+(cast(Any, f))(*aa)     # IDEA: Move to check-dynamic?
+(cast(Any, f))(a, *ab)  # IDEA: Move to check-dynamic?
+
+def f(a: 'A', b: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+
+[case testCallingWithTupleVarArgs]
+
+a = None # type: A
+b = None # type: B
+c = None # type: C
+cc = None # type: CC
+
+f(*(a, b, b)) # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, B]"; expected "C"
+f(*(b, b, c)) # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, C]"; expected "A"
+f(a, *(b, b)) # E: Argument 2 to "f" has incompatible type *"Tuple[B, B]"; expected "C"
+f(b, *(b, c)) # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(*(a, b))    # E: Too few arguments for "f"
+f(*(a, b, c, c)) # E: Too many arguments for "f"
+f(a, *(b, c, c)) # E: Too many arguments for "f"
+f(*(a, b, c))
+f(a, *(b, c))
+f(a, b, *(c,))
+f(a, *(b, cc))
+
+def f(a: 'A', b: 'B', c: 'C') -> None: pass
+
+class A: pass
+class B: pass
+class C: pass
+class CC(C): pass
+[builtins fixtures/tuple.pyi]
+
+[case testInvalidVarArg]
+
+a = None # type: A
+
+f(*None)
+f(*a)    # E: List or tuple expected as variable arguments
+f(*(a,))
+
+def f(a: 'A') -> None:
+    pass
+
+class A: pass
+[builtins fixtures/tuple.pyi]
+
+
+-- Calling varargs function with varargs
+-- -------------------------------------
+
+
+[case testCallingVarArgsFunctionWithListVarArgs]
+from typing import List
+aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B)
+f(*aa)           # Fail
+f(a, *aa)        # Fail
+f(b, *ab)        # Fail
+f(a, a, *ab)     # Fail
+f(a, b, *aa)     # Fail
+f(b, b, *ab)     # Fail
+g(*ab)           # Fail
+f(a, *ab)
+f(a, b, *ab)
+f(a, b, b, *ab)
+g(*aa)
+
+def f(a: 'A', *b: 'B') -> None: pass
+def g(a: 'A', *b: 'A') -> None: pass
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B"
+main:7: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:9: error: Argument 1 to "g" has incompatible type *List[B]; expected "A"
+
+[case testCallingVarArgsFunctionWithTupleVarArgs]
+
+a, b, c, cc = None, None, None, None # type: (A, B, C, CC)
+
+f(*(b, b, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[B, B, B]"; expected "A"
+f(*(a, a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
+f(*(a, b, a))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B, A]"; expected "B"
+f(a, *(a, b))   # E: Argument 2 to "f" has incompatible type *"Tuple[A, B]"; expected "B"
+f(b, *(b, b))   # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(b, b, *(b,))  # E: Argument 1 to "f" has incompatible type "B"; expected "A"
+f(a, a, *(b,))  # E: Argument 2 to "f" has incompatible type "A"; expected "B"
+f(a, b, *(a,))  # E: Argument 3 to "f" has incompatible type *"Tuple[A]"; expected "B"
+f(*())          # E: Too few arguments for "f"
+f(*(a, b, b))
+f(a, *(b, b))
+f(a, b, *(b,))
+
+def f(a: 'A', *b: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+class C: pass
+class CC(C): pass
+[builtins fixtures/list.pyi]
+
+
+-- Varargs special cases
+-- ---------------------
+
+
+[case testDynamicVarArg]
+from typing import Any
+d, a = None, None # type: (Any, A)
+f(a, a, *d) # Fail
+f(a, *d)    # Fail
+f(*d)       # Ok
+
+g(*d)
+g(a, *d)
+g(a, a, *d)
+
+def f(a: 'A') -> None: pass
+def g(a: 'A', *b: 'A') -> None: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Too many arguments for "f"
+main:4: error: Too many arguments for "f"
+
+[case testListVarArgsAndSubtyping]
+from typing import List
+aa = None # type: List[A]
+ab = None # type: List[B]
+
+g(*aa) # E: Argument 1 to "g" has incompatible type *List[A]; expected "B"
+f(*aa)
+f(*ab)
+g(*ab)
+
+def f( *a: 'A') -> None:
+    pass
+
+def g( *a: 'B') -> None:
+    pass
+
+class A: pass
+class B(A): pass
+[builtins fixtures/list.pyi]
+
+[case testCallerVarArgsAndDefaultArgs]
+
+a, b = None, None # type: (A, B)
+f(*())        # Fail
+f(a, *[a])    # Fail
+f(a, b, *[a]) # Fail
+f(*(a, a, b)) # Fail
+f(*(a,))
+f(*(a, b))
+f(*(a, b, b, b))
+f(a, *[])
+f(a, *[b])
+f(a, *[b, b])
+
+def f(a: 'A', b: 'B' = None, *c: 'B') -> None:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:3: error: Too few arguments for "f"
+main:4: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:5: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:6: error: Argument 1 to "f" has incompatible type *"Tuple[A, A, B]"; expected "B"
+
+[case testVarArgsAfterKeywordArgInCall1-skip]
+# see: mypy issue #2729
+def f(x: int, y: str) -> None: pass
+f(x=1, *[2])
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: "f" gets multiple values for keyword argument "x"
+main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
+
+[case testVarArgsAfterKeywordArgInCall2-skip]
+# see: mypy issue #2729
+def f(x: int, y: str) -> None: pass
+f(y='x', *[1])
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: "f" gets multiple values for keyword argument "y"
+main:2: error: Argument 2 to "f" has incompatible type *List[int]; expected "str"
+
+[case testVarArgsAfterKeywordArgInCall3]
+def f(x: int, y: str) -> None: pass
+f(y='x', *(1,))
+[builtins fixtures/list.pyi]
+
+[case testVarArgsAfterKeywordArgInCall4]
+def f(x: int, *, y: str) -> None: pass
+f(y='x', *[1])
+[builtins fixtures/list.pyi]
+
+[case testVarArgsAfterKeywordArgInCall5]
+def f(x: int, *, y: str) -> None: pass
+f(y='x', *(1,))
+[builtins fixtures/list.pyi]
+
+
+-- Overloads + varargs
+-- -------------------
+
+
+[case testIntersectionTypesAndVarArgs]
+from typing import overload
+a, b = None, None # type: (A, B)
+
+b = f()        # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a)       # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(a, b)    # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(b)       # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(b, b)    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+b = f(a, *[b]) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*())     # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*(a,))   # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+b = f(*(a, b)) # E: Incompatible types in assignment (expression has type "A", variable has type "B")
+a = f(*(b,))   # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(*(b, b)) # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+a = f(*[b])    # E: Incompatible types in assignment (expression has type "B", variable has type "A")
+
+a = f()
+a = f(a)
+a = f(a, b)
+b = f(b)
+b = f(b, b)
+a = f(a, *[b])
+a = f(*())
+a = f(*(a,))
+a = f(*(a, b))
+b = f(*(b,))
+b = f(*(b, b))
+b = f(*[b])
+
+class A: pass
+class B: pass
+
+ at overload
+def f(a: A = None, *b: B) -> A: pass
+
+ at overload
+def f(a: B, *b: B) -> B: pass
+[builtins fixtures/list.pyi]
+
+
+-- Caller varargs + type inference
+-- -------------------------------
+
+
+[case testCallerVarArgsListWithTypeInference]
+from typing import List, TypeVar, Tuple
+S = TypeVar('S')
+T = TypeVar('T')
+a, b, aa = None, None, None # type: (A, B, List[A])
+
+a, b = f(*aa)    # Fail
+b, b = f(*aa)    # Fail
+a, a = f(b, *aa) # Fail
+b, b = f(b, *aa) # Fail
+b, b = f(b, b, *aa) # Fail
+a, b = f(a, *a)  # Fail
+a, b = f(*a)     # Fail
+
+a, a = f(*aa)
+b, a = f(b, *aa)
+b, a = f(b, a, *aa)
+
+def f(a: S, *b: T) -> Tuple[S, T]:
+    pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:6: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:7: error: Argument 1 to "f" has incompatible type *List[A]; expected "B"
+main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A"
+main:9: error: Argument 2 to "f" has incompatible type *List[A]; expected "B"
+main:10: error: Argument 3 to "f" has incompatible type *List[A]; expected "B"
+main:11: error: List or tuple expected as variable arguments
+main:12: error: List or tuple expected as variable arguments
+
+[case testCallerVarArgsTupleWithTypeInference]
+from typing import TypeVar, Tuple
+S = TypeVar('S')
+T = TypeVar('T')
+a, b = None, None # type: (A, B)
+
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+a, a = f(*(a, b))   # E: Argument 1 to "f" has incompatible type *"Tuple[A, B]"; expected "A"
+b, b = f(a, *(b,))  # E: Argument 1 to "f" has incompatible type "A"; expected "B"
+a, b = f(*(a, b, b)) # E: Too many arguments for "f"
+
+a, b = f(*(a, b))
+a, b = f(a, *(b,))
+
+def f(a: S, b: T) -> Tuple[S, T]: pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+
+[case testCallerVarargsAndComplexTypeInference]
+from typing import List, TypeVar, Generic, Tuple
+T = TypeVar('T')
+S = TypeVar('S')
+a, b = None, None # type: (A, B)
+ao = None # type: List[object]
+aa = None # type: List[A]
+ab = None # type: List[B]
+
+a, aa = G().f(*[a])  # Fail
+aa, a = G().f(*[a])  # Fail
+ab, aa = G().f(*[a]) # Fail
+
+ao, ao = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[None], variable has type List[object])
+aa, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type List[None], variable has type List[A])
+
+class G(Generic[T]):
+    def f(self, *a: S) -> Tuple[List[S], List[T]]:
+        pass
+
+class A: pass
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+main:9: error: Incompatible types in assignment (expression has type List[A], variable has type "A")
+main:9: error: Incompatible types in assignment (expression has type List[None], variable has type List[A])
+main:10: error: Incompatible types in assignment (expression has type List[None], variable has type "A")
+main:11: error: Incompatible types in assignment (expression has type List[None], variable has type List[A])
+main:11: error: Argument 1 to "f" of "G" has incompatible type *List[A]; expected "B"
+
+
+-- Comment signatures
+-- ------------------
+
+
+[case testVarArgsAndCommentSignature]
+import typing
+def f(*x): # type: (*int) -> None
+    pass
+f(1)
+f(1, 2)
+f('') # E: Argument 1 to "f" has incompatible type "str"; expected "int"
+f(1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int"
+[builtins fixtures/list.pyi]
+
+
+-- Subtyping
+-- ---------
+
+
+[case testVarArgsFunctionSubtyping]
+from typing import Callable
+x = None # type: Callable[[int], None]
+def f(*x: int) -> None: pass
+def g(*x: str) -> None: pass
+x = f
+x = g # E: Incompatible types in assignment (expression has type Callable[[StarArg(str)], None], variable has type Callable[[int], None])
+[builtins fixtures/list.pyi]
+[out]
+
+
+-- Decorated method where self is implied by *args
+-- -----------------------------------------------
+
+[case testVarArgsCallableSelf]
+from typing import Callable
+def cm(func) -> Callable[..., None]: pass
+class C:
+    @cm
+    def foo(self) -> None: pass
+C().foo()
+C().foo(1)  # The decorator's return type says this should be okay
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
new file mode 100644
index 0000000..ab5f66b
--- /dev/null
+++ b/test-data/unit/check-warnings.test
@@ -0,0 +1,132 @@
+-- Test cases for warning generation.
+
+-- Redundant casts
+-- ---------------
+
+[case testRedundantCast]
+# flags: --warn-redundant-casts
+from typing import cast
+a = 1
+b = cast(str, a)
+c = cast(int, a)
+[out]
+main:5: note: Redundant cast to "int"
+
+[case testRedundantCastWithIsinstance]
+# flags: --warn-redundant-casts
+from typing import cast, Union
+x = 1  # type: Union[int, str]
+if isinstance(x, str):
+    cast(str, x)
+[builtins fixtures/isinstance.pyi]
+[out]
+main:5: note: Redundant cast to "str"
+
+[case testCastToSuperclassNotRedundant]
+# flags: --warn-redundant-casts
+from typing import cast, TypeVar, List
+T = TypeVar('T')
+def add(xs: List[T], ys: List[T]) -> List[T]: pass
+class A: pass
+class B(A): pass
+a = A()
+b = B()
+# Without the cast, the following line would fail to type check.
+c = add([cast(A, b)], [a])
+[builtins fixtures/list.pyi]
+
+
+-- Unused 'type: ignore' comments
+-- ------------------------------
+
+[case testUnusedTypeIgnore]
+# flags: --warn-unused-ignores
+a = 1
+a = 'a' # type: ignore
+a = 2 # type: ignore # N: unused 'type: ignore' comment
+a = 'b' # E: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testUnusedTypeIgnoreImport]
+# flags: --warn-unused-ignores
+import banana # type: ignore
+import m # type: ignore
+from m import * # type: ignore
+[file m.py]
+pass
+[out]
+main:3: note: unused 'type: ignore' comment
+main:4: note: unused 'type: ignore' comment
+
+
+-- No return
+-- ---------
+
+[case testNoReturn]
+# flags: --warn-no-return
+def f() -> int:
+    pass
+
+def g() -> int:
+    if bool():
+        return 1
+[builtins fixtures/list.pyi]
+[out]
+main:5: note: Missing return statement
+
+[case testNoReturnWhile]
+# flags: --warn-no-return
+def h() -> int:
+    while True:
+        if bool():
+            return 1
+
+def i() -> int:
+    while 1:
+        if bool():
+            return 1
+        if bool():
+            break
+
+def j() -> int:
+    while 1:
+        if bool():
+            return 1
+        if bool():
+            continue
+[builtins fixtures/list.pyi]
+[out]
+main:7: note: Missing return statement
+
+[case testNoReturnExcept]
+# flags: --warn-no-return
+def f() -> int:
+    try:
+        return 1
+    except:
+        pass
+def g() -> int:
+    try:
+        pass
+    except:
+        return 1
+    else:
+        return 1
+def h() -> int:
+    try:
+        pass
+    except:
+        pass
+    else:
+        pass
+    finally:
+        return 1
+[builtins fixtures/exception.pyi]
+[out]
+main:2: note: Missing return statement
+
+[case testNoReturnEmptyBodyWithDocstring]
+def f() -> int:
+    """Return the number of peppers."""
+    # This might be an @abstractmethod, for example
+    pass
+[out]
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
new file mode 100644
index 0000000..9a17285
--- /dev/null
+++ b/test-data/unit/cmdline.test
@@ -0,0 +1,479 @@
+-- Tests for command line parsing
+-- ------------------------------
+--
+-- The initial line specifies the command line, in the format
+--
+--   # cmd: mypy <options>
+
+
+-- Directories/packages on the command line
+-- ----------------------------------------
+
+[case testCmdlinePackage]
+# cmd: mypy pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+undef
+import pkg.subpkg.a
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+pkg/subpkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlinePackageSlash]
+# cmd: mypy pkg/
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+undef
+import pkg.subpkg.a
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+pkg/subpkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackage]
+# cmd: mypy dir
+[file dir/a.py]
+undef
+[file dir/subdir/a.py]
+undef
+[out]
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackageSlash]
+# cmd: mypy dir/
+[file dir/a.py]
+undef
+[file dir/subdir/a.py]
+undef
+[out]
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlinePackageContainingSubdir]
+# cmd: mypy pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+undef
+[file pkg/subdir/a.py]
+undef
+[out]
+pkg/a.py:1: error: Name 'undef' is not defined
+
+[case testCmdlineNonPackageContainingPackage]
+# cmd: mypy dir
+[file dir/a.py]
+undef
+import subpkg.a
+[file dir/subpkg/__init__.py]
+[file dir/subpkg/a.py]
+undef
+[out]
+dir/subpkg/a.py:1: error: Name 'undef' is not defined
+dir/a.py:1: error: Name 'undef' is not defined
+
+[case testBadFileEncoding]
+# cmd: mypy a.py
+[file a.py]
+# coding: uft-8
+[out]
+mypy: can't decode file 'a.py': unknown encoding: uft-8
+
+[case testCannotIgnoreDuplicateModule]
+# cmd: mypy one/mod/__init__.py two/mod/__init__.py
+[file one/mod/__init__.py]
+# type: ignore
+[file two/mod/__init__.py]
+# type: ignore
+[out]
+two/mod/__init__.py: error: Duplicate module named 'mod'
+
+[case testFlagsFile]
+# cmd: mypy @flagsfile
+[file flagsfile]
+-2
+main.py
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testConfigFile]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+python_version = 2.7
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testAltConfigFile]
+# cmd: mypy --config-file config.ini main.py
+[file config.ini]
+[[mypy]
+python_version = 2.7
+[file main.py]
+def f():
+    try:
+        1/0
+    except ZeroDivisionError, err:
+        print err
+
+[case testPerFileConfigSection]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+hide_error_context = True
+disallow_untyped_defs = True
+[[mypy-y*]
+disallow_untyped_defs = False
+[[mypy-z*]
+disallow_untyped_calls = True
+[file x.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[file y.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[file z.py]
+def f(a):
+    pass
+def g(a: int) -> int:
+    return f(a)
+[out]
+z.py:1: error: Function is missing a type annotation
+z.py:4: error: Call to untyped function "f" in typed context
+x.py:1: error: Function is missing a type annotation
+
+[case testPerFileConfigSectionMultipleMatches]
+# cmd: mypy xx.py xy.py yx.py yy.py
+[file mypy.ini]
+[[mypy]
+hide_error_context = True
+[[mypy-*x*]
+disallow_untyped_defs = True
+[[mypy-*y*]
+disallow_untyped_calls = True
+[file xx.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file xy.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file yx.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[file yy.py]
+def f(a): pass
+def g(a: int) -> int: return f(a)
+[out]
+yy.py:2: error: Call to untyped function "f" in typed context
+yx.py:1: error: Function is missing a type annotation
+yx.py:2: error: Call to untyped function "f" in typed context
+xy.py:1: error: Function is missing a type annotation
+xy.py:2: error: Call to untyped function "f" in typed context
+xx.py:1: error: Function is missing a type annotation
+
+[case testMultipleGlobConfigSection]
+# cmd: mypy x.py y.py z.py
+[file mypy.ini]
+[[mypy]
+hide_error_context = True
+[[mypy-x*,z*]
+disallow_untyped_defs = True
+[file x.py]
+def f(a): pass
+[file y.py]
+def f(a): pass
+[file z.py]
+def f(a): pass
+[out]
+z.py:1: error: Function is missing a type annotation
+x.py:1: error: Function is missing a type annotation
+
+[case testConfigErrorNoSection]
+# cmd: mypy -c pass
+[file mypy.ini]
+[out]
+mypy.ini: No [mypy] section in config file
+
+[case testConfigErrorUnknownFlag]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+bad = 0
+[out]
+mypy.ini: [mypy]: Unrecognized option: bad = 0
+
+[case testConfigErrorUnknownReport]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+bad_report = .
+[out]
+mypy.ini: [mypy]: Unrecognized report type: bad_report
+
+[case testConfigErrorBadBoolean]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = nah
+[out]
+mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah
+
+[case testConfigErrorNotPerFile]
+# cmd: mypy -c pass
+[file mypy.ini]
+[[mypy]
+[[mypy-*]
+strict_optional = True
+[out]
+mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (strict_optional)
+
+[case testCoberturaParser]
+# cmd: mypy --cobertura-xml-report build pkg
+[file pkg/__init__.py]
+[file pkg/a.py]
+from typing import Dict
+
+def foo() -> Dict:
+  z = {'hello': 'world'}
+  return z
+[file pkg/subpkg/__init__.py]
+[file pkg/subpkg/a.py]
+def bar() -> str:
+  return 'world'
+def untyped_function():
+  return 42
+[outfile build/cobertura.xml]
+<coverage timestamp="$TIMESTAMP" version="$VERSION" line-rate="0.8000" branch-rate="0">
+  <sources>
+    <source>$PWD</source>
+  </sources>
+  <packages>
+    <package complexity="1.0" name="pkg" branch-rate="0" line-rate="1.0000">
+      <classes>
+        <class complexity="1.0" filename="pkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
+          <methods/>
+          <lines/>
+        </class>
+        <class complexity="1.0" filename="pkg/a.py" name="a.py" branch-rate="0" line-rate="1.0000">
+          <methods/>
+          <lines>
+            <line branch="true" hits="1" number="3" precision="imprecise" condition-coverage="50% (1/2)"/>
+            <line branch="false" hits="1" number="4" precision="precise"/>
+            <line branch="false" hits="1" number="5" precision="precise"/>
+          </lines>
+        </class>
+      </classes>
+    </package>
+    <package complexity="1.0" name="pkg.subpkg" branch-rate="0" line-rate="0.5000">
+      <classes>
+        <class complexity="1.0" filename="pkg/subpkg/__init__.py" name="__init__.py" branch-rate="0" line-rate="1.0">
+          <methods/>
+          <lines/>
+        </class>
+        <class complexity="1.0" filename="pkg/subpkg/a.py" name="a.py" branch-rate="0" line-rate="0.5000">
+          <methods/>
+          <lines>
+            <line branch="false" hits="1" number="1" precision="precise"/>
+            <line branch="false" hits="0" number="3" precision="any"/>
+          </lines>
+        </class>
+      </classes>
+    </package>
+  </packages>
+</coverage>
+
+[case testConfigMypyPath]
+# cmd: mypy file.py
+[file mypy.ini]
+[[mypy]
+mypy_path =
+    foo:bar
+    , baz
+[file foo/foo.pyi]
+def foo(x: int) -> str: ...
+[file bar/bar.pyi]
+def bar(x: str) -> list: ...
+[file baz/baz.pyi]
+def baz(x: list) -> dict: ...
+[file file.py]
+import no_stubs
+from foo import foo
+from bar import bar
+from baz import baz
+baz(bar(foo(42)))
+baz(bar(foo('oof')))
+[out]
+file.py:1: error: Cannot find module named 'no_stubs'
+file.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int"
+
+[case testIgnoreErrorsConfig]
+# cmd: mypy x.py y.py
+[file mypy.ini]
+[[mypy]
+[[mypy-x]
+ignore_errors = True
+[file x.py]
+"" + 0
+[file y.py]
+"" + 0
+[out]
+y.py:1: error: Unsupported operand types for + ("str" and "int")
+
+[case testConfigFollowImportsNormal]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+x + 0
+x + ''  # E
+import a
+a.x + 0
+a.x + ''  # E
+a.y  # E
+a + 0  # E
+[file mypy.ini]
+[[mypy]
+follow_imports = normal
+[file a.py]
+x = 0
+x += ''  # Error reported here
+[out]
+a.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:3: error: Unsupported operand types for + ("int" and "str")
+main.py:6: error: Unsupported operand types for + ("int" and "str")
+main.py:7: error: "module" has no attribute "y"
+main.py:8: error: Unsupported operand types for + ("module" and "int")
+
+[case testConfigFollowImportsSilent]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+x + ''
+import a
+a.x + ''
+a.y
+a + 0
+[file mypy.ini]
+[[mypy]
+follow_imports = silent
+[file a.py]
+x = 0
+x += ''  # No error reported
+[out]
+main.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:4: error: Unsupported operand types for + ("int" and "str")
+main.py:5: error: "module" has no attribute "y"
+main.py:6: error: Unsupported operand types for + ("module" and "int")
+
+[case testConfigFollowImportsSkip]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+reveal_type(x)  # Expect Any
+import a
+reveal_type(a.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+follow_imports = skip
+[file a.py]
+/  # No error reported
+[out]
+main.py:2: error: Revealed type is 'Any'
+main.py:4: error: Revealed type is 'Any'
+
+[case testConfigFollowImportsError]
+# cmd: mypy main.py
+[file main.py]
+from a import x
+reveal_type(x)  # Expect Any
+import a  # Error reported here
+reveal_type(a.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+follow_imports = error
+[file a.py]
+/  # No error reported
+[out]
+main.py:1: note: Import of 'a' ignored
+main.py:1: note: (Using --follow-imports=error, module not passed on command line)
+main.py:2: error: Revealed type is 'Any'
+main.py:4: error: Revealed type is 'Any'
+
+[case testConfigFollowImportsSelective]
+# cmd: mypy main.py
+[file mypy.ini]
+[[mypy]
+[[mypy-normal]
+follow_imports = normal
+[[mypy-silent]
+follow_imports = silent
+[[mypy-skip]
+follow_imports = skip
+[[mypy-error]
+follow_imports = error
+[file main.py]
+import normal
+import silent
+import skip
+import error
+reveal_type(normal.x)
+reveal_type(silent.x)
+reveal_type(skip)
+reveal_type(error)
+[file normal.py]
+x = 0
+x += ''
+[file silent.py]
+x = 0
+x += ''
+[file skip.py]
+bla bla
+[file error.py]
+bla bla
+[out]
+main.py:4: note: Import of 'error' ignored
+main.py:4: note: (Using --follow-imports=error, module not passed on command line)
+normal.py:2: error: Unsupported operand types for + ("int" and "str")
+main.py:5: error: Revealed type is 'builtins.int'
+main.py:6: error: Revealed type is 'builtins.int'
+main.py:7: error: Revealed type is 'Any'
+main.py:8: error: Revealed type is 'Any'
+
+[case testConfigSilentMissingImportsOff]
+# cmd: mypy main.py
+[file main.py]
+import missing  # Expect error here
+reveal_type(missing.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = False
+[out]
+main.py:1: error: Cannot find module named 'missing'
+main.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main.py:2: error: Revealed type is 'Any'
+
+[case testConfigSilentMissingImportsOn]
+# cmd: mypy main.py
+[file main.py]
+import missing  # No error here
+reveal_type(missing.x)  # Expect Any
+[file mypy.ini]
+[[mypy]
+ignore_missing_imports = True
+[out]
+main.py:2: error: Revealed type is 'Any'
diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi
new file mode 100644
index 0000000..4e2cc57
--- /dev/null
+++ b/test-data/unit/fixtures/__new__.pyi
@@ -0,0 +1,14 @@
+# builtins stub with object.__new__
+
+class object:
+    def __init__(self) -> None: pass
+
+    def __new__(cls): pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class int: pass
+class bool: pass
+class str: pass
+class function: pass
diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi
new file mode 100644
index 0000000..5909cb6
--- /dev/null
+++ b/test-data/unit/fixtures/alias.pyi
@@ -0,0 +1,12 @@
+# Builtins test fixture with a type alias 'bytes'
+
+class object:
+    def __init__(self) -> None: pass
+class type:
+    def __init__(self, x) -> None: pass
+
+class int: pass
+class str: pass
+class function: pass
+
+bytes = str
diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi
new file mode 100644
index 0000000..e4a6ffe
--- /dev/null
+++ b/test-data/unit/fixtures/args.pyi
@@ -0,0 +1,29 @@
+# Builtins stub used to support *args, **kwargs.
+
+from typing import TypeVar, Generic, Iterable, Tuple, Dict, Any, overload
+
+Tco = TypeVar('Tco', covariant=True)
+T = TypeVar('T')
+S = TypeVar('S')
+
+class object:
+    def __init__(self) -> None: pass
+    def __eq__(self, o: object) -> bool: pass
+    def __ne__(self, o: object) -> bool: pass
+
+class type:
+    @overload
+    def __init__(self, o: object) -> None: pass
+    @overload
+    def __init__(self, name: str, bases: Tuple[type, ...], dict: Dict[str, Any]) -> None: pass
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: pass
+
+class tuple(Iterable[Tco], Generic[Tco]): pass
+class dict(Generic[T, S]): pass
+
+class int:
+    def __eq__(self, o: object) -> bool: pass
+class str: pass
+class bool: pass
+class function: pass
+class module: pass
diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi
new file mode 100644
index 0000000..7a166a0
--- /dev/null
+++ b/test-data/unit/fixtures/async_await.pyi
@@ -0,0 +1,9 @@
+import typing
+class object:
+    def __init__(self): pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class list: pass
+class tuple: pass
diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi
new file mode 100644
index 0000000..b463b02
--- /dev/null
+++ b/test-data/unit/fixtures/bool.pyi
@@ -0,0 +1,15 @@
+# builtins stub used in boolean-related test cases.
+
+from typing import builtinclass
+
+ at builtinclass
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class bool: pass
+class int: pass
+class str: pass
+class unicode: pass
diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi
new file mode 100644
index 0000000..ae58648
--- /dev/null
+++ b/test-data/unit/fixtures/callable.pyi
@@ -0,0 +1,26 @@
+from typing import Generic, Tuple, TypeVar, Union
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple(Generic[T]): pass
+
+class function: pass
+
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+
+def callable(x: object) -> bool: pass
+
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+    def __eq__(self, other: 'int') -> 'bool': pass
+class float: pass
+class bool(int): pass
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+    def __eq__(self, other: 'str') -> bool: pass
diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi
new file mode 100644
index 0000000..282839d
--- /dev/null
+++ b/test-data/unit/fixtures/classmethod.pyi
@@ -0,0 +1,22 @@
+import typing
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+    def mro(self) -> typing.Any: pass
+
+class function: pass
+
+# Dummy definitions.
+classmethod = object()
+staticmethod = object()
+
+class int:
+    @classmethod
+    def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass
+
+class str: pass
+class bytes: pass
+class bool: pass
diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi
new file mode 100644
index 0000000..d4135be
--- /dev/null
+++ b/test-data/unit/fixtures/complex.pyi
@@ -0,0 +1,11 @@
+# Builtins stub used for some float/complex test cases.
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+class int: pass
+class float: pass
+class complex: pass
+class str: pass
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
new file mode 100644
index 0000000..18f3ebe
--- /dev/null
+++ b/test-data/unit/fixtures/dict.pyi
@@ -0,0 +1,35 @@
+# Builtins stub used in dictionary-related test cases.
+
+from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+
+class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def update(self, a: Mapping[KT, VT]) -> None: pass
+
+class int: # for convenience
+    def __add__(self, x: int) -> int: pass
+
+class str: pass # for keyword argument key type
+class unicode: pass # needed for py2 docstrings
+
+class list(Iterable[T], Generic[T]): # needed by some test cases
+    def __iter__(self) -> Iterator[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+
+class tuple: pass
+class function: pass
+class float: pass
diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi
new file mode 100644
index 0000000..5a2482d
--- /dev/null
+++ b/test-data/unit/fixtures/exception.pyi
@@ -0,0 +1,13 @@
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class int: pass
+class str: pass
+class unicode: pass
+class bool: pass
+
+class BaseException: pass
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi
new file mode 100644
index 0000000..4762806
--- /dev/null
+++ b/test-data/unit/fixtures/for.pyi
@@ -0,0 +1,19 @@
+# builtins stub used in for statement test cases
+
+from typing import TypeVar, Generic, Iterable, Iterator, Generator
+from abc import abstractmethod, ABCMeta
+
+t = TypeVar('t')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+class bool: pass
+class int: pass # for convenience
+class str: pass # for convenience
+
+class list(Iterable[t], Generic[t]):
+    def __iter__(self) -> Iterator[t]: pass
diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi
new file mode 100644
index 0000000..768ca90
--- /dev/null
+++ b/test-data/unit/fixtures/function.pyi
@@ -0,0 +1,10 @@
+from typing import builtinclass
+
+ at builtinclass
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+class int: pass
+class str: pass
diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi
new file mode 100644
index 0000000..c155a97
--- /dev/null
+++ b/test-data/unit/fixtures/isinstance.pyi
@@ -0,0 +1,22 @@
+from typing import builtinclass, Tuple, TypeVar, Generic, Union
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple(Generic[T]): pass
+
+class function: pass
+
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+class float: pass
+class bool(int): pass
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
diff --git a/test-data/unit/fixtures/isinstancelist.pyi b/test-data/unit/fixtures/isinstancelist.pyi
new file mode 100644
index 0000000..4b35698
--- /dev/null
+++ b/test-data/unit/fixtures/isinstancelist.pyi
@@ -0,0 +1,44 @@
+from typing import builtinclass, Iterable, Iterator, Generic, TypeVar, List, Mapping, overload, Tuple
+
+ at builtinclass
+class object:
+    def __init__(self) -> None: pass
+
+ at builtinclass
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple: pass
+class function: pass
+
+def isinstance(x: object, t: type) -> bool: pass
+
+ at builtinclass
+class int:
+    def __add__(self, x: int) -> int: pass
+ at builtinclass
+class bool(int): pass
+ at builtinclass
+class str:
+    def __add__(self, x: str) -> str: pass
+    def __getitem__(self, x: int) -> str: pass
+
+T = TypeVar('T')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+class list(Iterable[T], Generic[T]):
+    def __iter__(self) -> Iterator[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __setitem__(self, x: int, v: T) -> None: pass
+    def __getitem__(self, x: int) -> T: pass
+    def __add__(self, x: List[T]) -> T: pass
+
+class dict(Iterable[KT], Mapping[KT, VT], Generic[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def update(self, a: Mapping[KT, VT]) -> None: pass
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
new file mode 100644
index 0000000..9413cf7
--- /dev/null
+++ b/test-data/unit/fixtures/list.pyi
@@ -0,0 +1,30 @@
+# Builtins stub used in list-related test cases.
+
+from typing import TypeVar, Generic, builtinclass, Iterable, Iterator, overload
+
+T = TypeVar('T')
+
+ at builtinclass
+class object:
+    def __init__(self): pass
+
+class type: pass
+class ellipsis: pass
+
+class list(Iterable[T], Generic[T]):
+    @overload
+    def __init__(self) -> None: pass
+    @overload
+    def __init__(self, x: Iterable[T]) -> None: pass
+    def __iter__(self) -> Iterator[T]: pass
+    def __add__(self, x: list[T]) -> list[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __getitem__(self, x: int) -> T: pass
+    def append(self, x: T) -> None: pass
+    def extend(self, x: Iterable[T]) -> None: pass
+
+class tuple: pass
+class function: pass
+class int: pass
+class str: pass
+class bool: pass
diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi
new file mode 100644
index 0000000..fb2a4c2
--- /dev/null
+++ b/test-data/unit/fixtures/module.pyi
@@ -0,0 +1,18 @@
+from typing import Any, Dict, Generic, TypeVar
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+class object:
+    def __init__(self) -> None: pass
+class module:
+    __name__ = ...  # type: str
+    __file__ = ...  # type: str
+    __dict__ = ...  # type: Dict[str, Any]
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class bool: pass
+class tuple: pass
+class dict(Generic[T, S]): pass
diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi
new file mode 100644
index 0000000..cc1b552
--- /dev/null
+++ b/test-data/unit/fixtures/module_all.pyi
@@ -0,0 +1,15 @@
+from typing import Generic, Sequence, TypeVar
+_T = TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+class module: pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class list(Generic[_T], Sequence[_T]):
+    def append(self, x: _T): pass
+    def extend(self, x: Sequence[_T]): pass
+    def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
+class tuple: pass
diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi
new file mode 100644
index 0000000..ed17d4d
--- /dev/null
+++ b/test-data/unit/fixtures/module_all_python2.pyi
@@ -0,0 +1,16 @@
+from typing import Generic, Sequence, TypeVar
+_T = TypeVar('_T')
+
+class object:
+    def __init__(self) -> None: pass
+class module: pass
+class type: pass
+class function: pass
+class int: pass
+class str: pass
+class unicode: pass
+class list(Generic[_T], Sequence[_T]):
+    def append(self, x: _T): pass
+    def extend(self, x: Sequence[_T]): pass
+    def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass
+class tuple: pass
diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi
new file mode 100644
index 0000000..8956b79
--- /dev/null
+++ b/test-data/unit/fixtures/ops.pyi
@@ -0,0 +1,58 @@
+from typing import builtinclass, overload, Any, Generic, Sequence, Tuple, TypeVar
+
+Tco = TypeVar('Tco', covariant=True)
+
+# This is an extension of transform builtins with additional operations.
+
+ at builtinclass
+class object:
+    def __init__(self) -> None: pass
+    def __eq__(self, o: 'object') -> 'bool': pass
+    def __ne__(self, o: 'object') -> 'bool': pass
+
+class type: pass
+
+class slice: pass
+
+class tuple(Sequence[Tco], Generic[Tco]):
+    def __getitem__(self, x: int) -> Tco: pass
+    def __eq__(self, x: object) -> bool: pass
+    def __ne__(self, x: object) -> bool: pass
+    def __lt__(self, x: 'tuple') -> bool: pass
+    def __le__(self, x: 'tuple') -> bool: pass
+    def __gt__(self, x: 'tuple') -> bool: pass
+    def __ge__(self, x: 'tuple') -> bool: pass
+
+class function: pass
+
+class bool: pass
+
+class str:
+    def __init__(self, x: 'int') -> None: pass
+    def __add__(self, x: 'str') -> 'str': pass
+    def startswith(self, x: 'str') -> bool: pass
+
+class unicode: pass
+
+class int:
+    def __add__(self, x: 'int') -> 'int': pass
+    def __sub__(self, x: 'int') -> 'int': pass
+    def __mul__(self, x: 'int') -> 'int': pass
+    def __mod__(self, x: 'int') -> 'int': pass
+    def __floordiv__(self, x: 'int') -> 'int': pass
+    def __pos__(self) -> 'int': pass
+    def __neg__(self) -> 'int': pass
+    def __eq__(self, x: object) -> bool: pass
+    def __ne__(self, x: object) -> bool: pass
+    def __lt__(self, x: 'int') -> bool: pass
+    def __le__(self, x: 'int') -> bool: pass
+    def __gt__(self, x: 'int') -> bool: pass
+    def __ge__(self, x: 'int') -> bool: pass
+
+class float: pass
+
+class BaseException: pass
+
+def __print(a1=None, a2=None, a3=None, a4=None): pass
+
+class module: pass
diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
new file mode 100644
index 0000000..b6ec4d4
--- /dev/null
+++ b/test-data/unit/fixtures/primitives.pyi
@@ -0,0 +1,17 @@
+# builtins stub with non-generic primitive types
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class int: pass
+class float: pass
+class complex: pass
+class bool: pass
+class str: pass
+class bytes: pass
+class bytearray: pass
+class tuple: pass
+class function: pass
diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi
new file mode 100644
index 0000000..b2e747b
--- /dev/null
+++ b/test-data/unit/fixtures/property.pyi
@@ -0,0 +1,17 @@
+import typing
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class function: pass
+
+property = object() # Dummy definition.
+
+class int: pass
+class str: pass
+class bytes: pass
+class tuple: pass
+class bool: pass
diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi
new file mode 100644
index 0000000..61e48be
--- /dev/null
+++ b/test-data/unit/fixtures/python2.pyi
@@ -0,0 +1,18 @@
+from typing import Generic, Iterable, TypeVar
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class function: pass
+
+class int: pass
+class str: pass
+class unicode: pass
+
+T = TypeVar('T')
+class list(Iterable[T], Generic[T]): pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi
new file mode 100644
index 0000000..cb8bbcf
--- /dev/null
+++ b/test-data/unit/fixtures/set.pyi
@@ -0,0 +1,21 @@
+# Builtins stub used in set-related test cases.
+
+from typing import TypeVar, Generic, Iterator, Iterable, Set
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+class tuple: pass
+class function: pass
+
+class int: pass
+class str: pass
+
+class set(Iterable[T], Generic[T]):
+    def __iter__(self) -> Iterator[T]: pass
+    def add(self, x: T) -> None: pass
+    def discard(self, x: T) -> None: pass
+    def update(self, x: Set[T]) -> None: pass
diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi
new file mode 100644
index 0000000..c01ffbb
--- /dev/null
+++ b/test-data/unit/fixtures/slice.pyi
@@ -0,0 +1,13 @@
+# Builtins stub used in slicing test cases.
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple: pass
+class function: pass
+
+class int: pass
+class str: pass
+
+class slice: pass
diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi
new file mode 100644
index 0000000..5f1013f
--- /dev/null
+++ b/test-data/unit/fixtures/staticmethod.pyi
@@ -0,0 +1,19 @@
+import typing
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class function: pass
+
+staticmethod = object() # Dummy definition.
+
+class int:
+    @staticmethod
+    def from_bytes(bytes: bytes, byteorder: str) -> int: pass
+
+class str: pass
+class unicode: pass
+class bytes: pass
diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi
new file mode 100644
index 0000000..afdc2bf
--- /dev/null
+++ b/test-data/unit/fixtures/transform.pyi
@@ -0,0 +1,30 @@
+# Builtins stubs used implicitly in program transformation test cases.
+
+class object:
+    def __init__(self) -> None: pass
+
+class type: pass
+
+# str is handy for debugging; allows outputting messages.
+class str: pass
+
+# Primitive types int/float have special coercion behaviour (they may have
+# a different representation from ordinary values).
+
+class int: pass
+
+class float: pass
+
+
+# The functions below are special functions used in test cases; their
+# implementations are actually in the __dynchk module, but they are defined
+# here so that the semantic analyzer and the type checker are happy without
+# having to analyze the entire __dynchk module all the time.
+#
+# The transformation implementation has special case handling for these
+# functions; it's a bit ugly but it works for now.
+
+def __print(a1=None, a2=None, a3=None, a4=None):
+    # Do not use *args since this would require list and break many test
+    # cases.
+    pass
diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi
new file mode 100644
index 0000000..b195dfa
--- /dev/null
+++ b/test-data/unit/fixtures/tuple-simple.pyi
@@ -0,0 +1,20 @@
+# Builtins stub used in some tuple-related test cases.
+#
+# This is a simpler version of tuple.py which is useful
+# and makes some test cases easier to write/debug.
+
+from typing import Iterable, TypeVar, Generic
+
+T = TypeVar('T')
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class tuple(Generic[T]):
+    def __getitem__(self, x: int) -> T: pass
+class function: pass
+
+# We need int for indexing tuples.
+class int: pass
+class str: pass # For convenience
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
new file mode 100644
index 0000000..2300cf4
--- /dev/null
+++ b/test-data/unit/fixtures/tuple.pyi
@@ -0,0 +1,29 @@
+# Builtins stub used in tuple-related test cases.
+
+from typing import Iterable, Iterator, TypeVar, Generic, Sequence
+
+Tco = TypeVar('Tco', covariant=True)
+
+class object:
+    def __init__(self): pass
+
+class type:
+    def __init__(self, *a) -> None: pass
+    def __call__(self, *a) -> object: pass
+class tuple(Sequence[Tco], Generic[Tco]):
+    def __iter__(self) -> Iterator[Tco]: pass
+    def __getitem__(self, x: int) -> Tco: pass
+class function: pass
+
+# We need int for indexing tuples.
+class int: pass
+class bool: pass
+class str: pass # For convenience
+class unicode: pass
+
+T = TypeVar('T')
+
+class list(Sequence[T], Generic[T]): pass
+def isinstance(x: object, t: type) -> bool: pass
+
+def sum(iterable: Iterable[T], start: T = None) -> T: pass
diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi
new file mode 100644
index 0000000..78a41f9
--- /dev/null
+++ b/test-data/unit/fixtures/union.pyi
@@ -0,0 +1,18 @@
+# Builtins stub used in tuple-related test cases.
+
+from isinstance import isinstance
+from typing import Iterable, TypeVar
+
+class object:
+    def __init__(self): pass
+
+class type: pass
+class function: pass
+
+# Current tuple types get special treatment in the type checker, thus there
+# is no need for type arguments here.
+class tuple: pass
+
+# We need int for indexing tuples.
+class int: pass
+class str: pass # For convenience
diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi
new file mode 100644
index 0000000..eec6228
--- /dev/null
+++ b/test-data/unit/lib-stub/__builtin__.pyi
@@ -0,0 +1,27 @@
+class Any: pass
+
+class object:
+    def __init__(self):
+        # type: () -> None
+        pass
+
+class type:
+    def __init__(self, x):
+        # type: (Any) -> None
+        pass
+
+# These are provided here for convenience.
+class int: pass
+class float: pass
+
+class str: pass
+class unicode: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+def print(*args, end=''): pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/abc.pyi b/test-data/unit/lib-stub/abc.pyi
new file mode 100644
index 0000000..4afe734
--- /dev/null
+++ b/test-data/unit/lib-stub/abc.pyi
@@ -0,0 +1,3 @@
+class ABCMeta: pass
+abstractmethod = object()
+abstractproperty = object()
diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi
new file mode 100644
index 0000000..9a636bf
--- /dev/null
+++ b/test-data/unit/lib-stub/builtins.pyi
@@ -0,0 +1,23 @@
+class Any: pass
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x: Any) -> None: pass
+
+# These are provided here for convenience.
+class int:
+    def __add__(self, other: 'int') -> 'int': pass
+class float: pass
+
+class str:
+    def __add__(self, other: 'str') -> 'str': pass
+class bytes: pass
+
+class tuple: pass
+class function: pass
+
+class ellipsis: pass
+
+# Definition of None is implicit
diff --git a/test-data/unit/lib-stub/collections.pyi b/test-data/unit/lib-stub/collections.pyi
new file mode 100644
index 0000000..00b7cea
--- /dev/null
+++ b/test-data/unit/lib-stub/collections.pyi
@@ -0,0 +1,3 @@
+import typing
+
+namedtuple = object()
diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi
new file mode 100644
index 0000000..2bfc072
--- /dev/null
+++ b/test-data/unit/lib-stub/mypy_extensions.pyi
@@ -0,0 +1,6 @@
+from typing import Dict, Type, TypeVar
+
+T = TypeVar('T')
+
+
+def TypedDict(typename: str, fields: Dict[str, Type[T]]) -> Type[dict]: pass
diff --git a/test-data/unit/lib-stub/sys.pyi b/test-data/unit/lib-stub/sys.pyi
new file mode 100644
index 0000000..3959cb0
--- /dev/null
+++ b/test-data/unit/lib-stub/sys.pyi
@@ -0,0 +1,2 @@
+version_info = (0, 0, 0, '', 0)
+platform = ''
diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi
new file mode 100644
index 0000000..aa0a19f
--- /dev/null
+++ b/test-data/unit/lib-stub/types.pyi
@@ -0,0 +1,4 @@
+from typing import TypeVar
+T = TypeVar('T')
+def coroutine(func: T) -> T:
+    return func
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi
new file mode 100644
index 0000000..77a7b34
--- /dev/null
+++ b/test-data/unit/lib-stub/typing.pyi
@@ -0,0 +1,90 @@
+# Stub for typing module. Many of the definitions have special handling in
+# the type checker, so they can just be initialized to anything.
+
+from abc import abstractmethod
+
+cast = 0
+overload = 0
+Any = 0
+Union = 0
+Optional = 0
+TypeVar = 0
+Generic = 0
+Tuple = 0
+Callable = 0
+builtinclass = 0
+_promote = 0
+NamedTuple = 0
+Type = 0
+no_type_check = 0
+
+# Type aliases.
+List = 0
+Dict = 0
+Set = 0
+
+T = TypeVar('T')
+U = TypeVar('U')
+V = TypeVar('V')
+S = TypeVar('S')
+
+class Container(Generic[T]):
+    @abstractmethod
+    # Use int because bool isn't in the default test builtins
+    def __contains__(self, arg: T) -> int: pass
+
+class Sized:
+    @abstractmethod
+    def __len__(self) -> int: pass
+
+class Iterable(Generic[T]):
+    @abstractmethod
+    def __iter__(self) -> 'Iterator[T]': pass
+
+class Iterator(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __next__(self) -> T: pass
+
+class Generator(Iterator[T], Generic[T, U, V]):
+    @abstractmethod
+    def send(self, value: U) -> T: pass
+
+    @abstractmethod
+    def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass
+
+    @abstractmethod
+    def close(self) -> None: pass
+
+    @abstractmethod
+    def __iter__(self) -> 'Generator[T, U, V]': pass
+
+class Awaitable(Generic[T]):
+    @abstractmethod
+    def __await__(self) -> Generator[Any, Any, T]: pass
+
+class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]):
+    pass
+
+class AsyncIterable(Generic[T]):
+    @abstractmethod
+    def __aiter__(self) -> 'AsyncIterator[T]': pass
+
+class AsyncIterator(AsyncIterable[T], Generic[T]):
+    def __aiter__(self) -> 'AsyncIterator[T]': return self
+    @abstractmethod
+    def __anext__(self) -> Awaitable[T]: pass
+
+class Sequence(Iterable[T], Generic[T]):
+    @abstractmethod
+    def __getitem__(self, n: Any) -> T: pass
+
+class Mapping(Generic[T, U]): pass
+
+class MutableMapping(Generic[T, U]): pass
+
+def NewType(name: str, tp: Type[T]) -> Callable[[T], T]:
+    def new_type(x):
+        return x
+    return new_type
+
+TYPE_CHECKING = 1
diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test
new file mode 100644
index 0000000..22a3c5c
--- /dev/null
+++ b/test-data/unit/parse-errors.test
@@ -0,0 +1,448 @@
+-- Test cases for parser errors. Each test case consists of two sections.
+-- The first section contains [case NAME] followed by the input code, while
+-- the second section contains [out] followed by the output from the parser.
+--
+-- The input file name in errors is "file".
+--
+-- Comments starting with "--" in this file will be ignored, except for lines
+-- starting with "----" that are not ignored. The first two dashes of these
+-- lines are interpreted as escapes and removed.
+
+[case testInvalidFunction]
+def f()
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testMissingIndent]
+if x:
+1
+[out]
+file:2: error: invalid syntax
+
+[case testUnexpectedIndent]
+1
+ 2
+[out]
+file:2: error: unexpected indent
+
+[case testInconsistentIndent]
+if x:
+  1
+   1
+[out]
+file:3: error: unexpected indent
+
+[case testInconsistentIndent2]
+if x:
+   1
+  1
+[out]
+file:3: error: unindent does not match any outer indentation level
+
+[case testInvalidBinaryOp]
+1>
+a*
+a+1*
+[out]
+file:1: error: invalid syntax
+
+[case testDoubleStar]
+**a
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidSuperClass]
+class A(C[):
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testMissingSuperClass]
+class A(:
+  pass
+[out]
+file:1: error: invalid syntax
+
+[case testUnexpectedEof]
+if 1:
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testInvalidKeywordArguments1]
+f(x=y, z)
+[out]
+file:1: error: positional argument follows keyword argument
+
+[case testInvalidKeywordArguments2]
+f(**x, y)
+[out]
+file:1: error: positional argument follows keyword argument unpacking
+
+[case testInvalidBareAsteriskAndVarArgs2]
+def f(*x: A, *) -> None: pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidBareAsteriskAndVarArgs3]
+def f(*, *x: A) -> None: pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidBareAsteriskAndVarArgs4]
+def f(*, **x: A) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidBareAsterisk1]
+def f(*) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidBareAsterisk2]
+def f(x, *) -> None: pass
+[out]
+file:1: error: named arguments must follow bare *
+
+[case testInvalidFuncDefArgs1]
+def f(x = y, x): pass
+[out]
+file:1: error: non-default argument follows default argument
+
+[case testInvalidFuncDefArgs3]
+def f(**x, y):
+   pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidFuncDefArgs4]
+def f(**x, y=x):
+    pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidStringLiteralType]
+def f(x:
+     'A['
+     ) -> None: pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidStringLiteralType2]
+def f(x:
+      'A B'
+      ) -> None: pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidTypeComment]
+0
+x = 0 # type: A A
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment2]
+0
+x = 0 # type: A[
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment3]
+0
+x = 0 # type:
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidTypeComment4]
+0
+x = 0 # type: *
+[out]
+file:2: error: syntax error in type comment
+
+[case testInvalidMultilineLiteralType]
+def f() -> "A\nB": pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment1]
+def f(): # type: x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment2]
+def f(): # type:
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment3]
+def f(): # type: (
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment4]
+def f(): # type: (.
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment5]
+def f(): # type: (x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment6]
+def f(): # type: (x)
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment7]
+def f(): # type: (x) -
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment8]
+def f(): # type: (x) ->
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment9]
+def f(): # type: (x) -> .
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testInvalidSignatureInComment10]
+def f(): # type: (x) -> x x
+  pass
+[out]
+file:1: error: syntax error in type comment
+
+[case testDuplicateSignatures1]
+def f() -> None: # type: () -> None
+  pass
+def f(): # type: () -> None
+    pass
+[out]
+file:1: error: Function has duplicate type signatures
+
+[case testDuplicateSignatures2]
+def f(x, y: Z): # type: (x, y) -> z
+  pass
+[out]
+file:1: error: Function has duplicate type signatures
+
+[case testTooManyTypes]
+def f(x, y): # type: (X, Y, Z) -> z
+  pass
+[out]
+file:1: error: Type signature has too many arguments
+
+[case testTooFewTypes]
+def f(x, y): # type: (X) -> z
+  pass
+[out]
+file:1: error: Type signature has too few arguments
+
+[case testCommentFunctionAnnotationVarArgMispatch-skip]
+# see mypy issue #1997
+def f(x): # type: (*X) -> Y
+    pass
+def g(*x): # type: (X) -> Y
+    pass
+[out]
+file:1: error: Inconsistent use of '*' in function signature
+file:3: error: Inconsistent use of '*' in function signature
+
+[case testCommentFunctionAnnotationVarArgMispatch2-skip]
+# see mypy issue #1997
+def f(*x, **y): # type: (**X, *Y) -> Z
+    pass
+def g(*x, **y): # type: (*X, *Y) -> Z
+    pass
+[out]
+file:1: error: Inconsistent use of '*' in function signature
+file:3: error: syntax error in type comment
+file:3: error: Inconsistent use of '*' in function signature
+file:3: error: Inconsistent use of '**' in function signature
+
+[case testPrintStatementInPython3-skip]
+print 1
+[out]
+file:1: error: Missing parentheses in call to 'print'
+
+[case testInvalidConditionInConditionalExpression]
+1 if 2, 3 else 4
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidConditionInConditionalExpression2]
+1 if x for y in z else 4
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidConditionInConditionalExpression2]
+1 if x else for y in z
+[out]
+file:1: error: invalid syntax
+
+[case testYieldFromNotRightParameter]
+def f():
+    yield from
+[out]
+file:2: error: invalid syntax
+
+[case testYieldFromAfterReturn]
+def f():
+    return yield from h()
+[out]
+file:2: error: invalid syntax
+
+[case testImportDotModule]
+import .x
+[out]
+file:1: error: invalid syntax
+
+[case testImportDot]
+import .
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidFunctionName]
+def while(): pass
+[out]
+file:1: error: invalid syntax
+
+[case testInvalidEllipsis1]
+...0
+..._
+...a
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf]
+if 1: if 2: pass
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf2]
+if 1: while 2: pass
+[out]
+file:1: error: invalid syntax
+
+[case testBlockStatementInSingleLineIf3]
+if 1: for x in y: pass
+[out]
+file:1: error: invalid syntax
+
+[case testUnexpectedEllipsis]
+a = a...
+[out]
+file:1: error: invalid syntax
+
+[case testParseErrorBeforeUnicodeLiteral]
+x u'y'
+[out]
+file:1: error: invalid syntax
+
+[case testParseErrorInExtendedSlicing]
+x[:,
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testParseErrorInExtendedSlicing2]
+x[:,::
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testParseErrorInExtendedSlicing3]
+x[:,:
+[out]
+file:1: error: unexpected EOF while parsing
+
+[case testPython2OctalIntLiteralInPython3]
+0377
+[out]
+file:1: error: invalid token
+
+[case testInvalidEncoding]
+# foo
+# coding: uft-8
+[out]
+file:0: error: unknown encoding: uft-8
+
+[case testInvalidEncoding2]
+# coding=Uft.8
+[out]
+file:0: error: unknown encoding: Uft.8
+
+[case testInvalidEncoding3]
+#!/usr/bin python
+# vim: set fileencoding=uft8 :
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testDoubleEncoding]
+# coding: uft8
+# coding: utf8
+# The first coding cookie should be used and fail.
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testDoubleEncoding2]
+# Again the first cookie should be used and fail.
+# coding: uft8
+# coding: utf8
+[out]
+file:0: error: unknown encoding: uft8
+
+[case testLongLiteralInPython3]
+2L
+0x2L
+[out]
+file:1: error: invalid syntax
+
+[case testPython2LegacyInequalityInPython3]
+1 <> 2
+[out]
+file:1: error: invalid syntax
+
+[case testLambdaInListComprehensionInPython3]
+([ 0 for x in 1, 2 if 3 ])
+[out]
+file:1: error: invalid syntax
+
+[case testTupleArgListInPython3]
+def f(x, (y, z)): pass
+[out]
+file:1: error: invalid syntax
+
+[case testBackquoteInPython3]
+`1 + 2`
+[out]
+file:1: error: invalid syntax
+
+[case testSmartQuotes]
+foo = ‘bar’
+[out]
+file:1: error: invalid character in identifier
+
+[case testExceptCommaInPython3]
+try:
+    pass
+except KeyError, IndexError:
+    pass
+[out]
+file:3: error: invalid syntax
+
+[case testLocalVarWithTypeOnNextLine]
+x = 0
+  # type: int
+[out]
+file:2: error: misplaced type annotation
diff --git a/test-data/unit/parse-python2.test b/test-data/unit/parse-python2.test
new file mode 100644
index 0000000..7abc157
--- /dev/null
+++ b/test-data/unit/parse-python2.test
@@ -0,0 +1,399 @@
+-- Test cases for parser -- Python 2 syntax.
+--
+-- See parse.test for a description of this file format.
+
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testStringLiterals]
+'bar'
+u'foo'
+ur'foo'
+u'''bar'''
+b'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(bar))
+  ExpressionStmt:2(
+    UnicodeExpr(foo))
+  ExpressionStmt:3(
+    UnicodeExpr(foo))
+  ExpressionStmt:4(
+    UnicodeExpr(bar))
+  ExpressionStmt:5(
+    StrExpr(foo)))
+
+[case testSimplePrint]
+print 1
+print 2, 3
+print (4, 5)
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    IntExpr(1)
+    Newline)
+  PrintStmt:2(
+    IntExpr(2)
+    IntExpr(3)
+    Newline)
+  PrintStmt:3(
+    TupleExpr:3(
+      IntExpr(4)
+      IntExpr(5))
+    Newline))
+
+[case testPrintWithNoArgs]
+print
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    Newline))
+
+[case testPrintWithTarget]
+print >>foo
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    Target(
+      NameExpr(foo))
+    Newline))
+
+[case testPrintWithTargetAndArgs]
+print >>foo, x
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(x)
+    Target(
+      NameExpr(foo))
+    Newline))
+
+[case testPrintWithTargetAndArgsAndTrailingComma]
+print >>foo, x, y,
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Target(
+      NameExpr(foo))))
+
+[case testSimpleWithTrailingComma]
+print 1,
+print 2, 3,
+print (4, 5),
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    IntExpr(1))
+  PrintStmt:2(
+    IntExpr(2)
+    IntExpr(3))
+  PrintStmt:3(
+    TupleExpr:3(
+      IntExpr(4)
+      IntExpr(5))))
+
+[case testOctalIntLiteral]
+00
+01
+0377
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(0))
+  ExpressionStmt:2(
+    IntExpr(1))
+  ExpressionStmt:3(
+    IntExpr(255)))
+
+[case testLongLiteral-skip]
+# see typed_ast issue #26
+0L
+123L
+012L
+0x123l
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(0))
+  ExpressionStmt:2(
+    IntExpr(123))
+  ExpressionStmt:3(
+    IntExpr(10))
+  ExpressionStmt:4(
+    IntExpr(291)))
+
+[case testTryExceptWithComma]
+try:
+    x
+except Exception, e:
+    y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x)))
+    NameExpr(Exception)
+    NameExpr(e)
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testTryExceptWithNestedComma]
+try:
+    x
+except (KeyError, IndexError):
+    y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x)))
+    TupleExpr:3(
+      NameExpr(KeyError)
+      NameExpr(IndexError))
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testExecStatement]
+exec a
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)))
+
+[case testExecStatementWithIn]
+exec a in globals()
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)
+    CallExpr:1(
+      NameExpr(globals)
+      Args())))
+
+[case testExecStatementWithInAnd2Expressions]
+exec a in x, y
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(a)
+    NameExpr(x)
+    NameExpr(y)))
+
+[case testEllipsisInExpression_python2]
+x = ... # E: invalid syntax
+[out]
+
+[case testStrLiteralConcatenationWithMixedLiteralTypes]
+u'foo' 'bar'
+'bar' u'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    UnicodeExpr(foobar))
+  ExpressionStmt:2(
+    UnicodeExpr(barfoo)))
+
+[case testLegacyInequality]
+1 <> 2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      !=
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testLambdaInListComprehensionInPython2]
+([ 0 for x in 1, 2 if 3 ])
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        IntExpr(0)
+        NameExpr(x)
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))
+        IntExpr(3)))))
+
+[case testTupleArgListInPython2]
+def f(x, (y, z)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        NameExpr(__tuple_arg_2))
+      PassStmt:1())))
+
+[case testTupleArgListWithTwoTupleArgsInPython2]
+def f((x, y), (z, zz)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(__tuple_arg_1)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(x)
+          NameExpr(y))
+        NameExpr(__tuple_arg_1))
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(z)
+          NameExpr(zz))
+        NameExpr(__tuple_arg_2))
+      PassStmt:1())))
+
+[case testTupleArgListWithInitializerInPython2]
+def f((y, z) = (1, 2)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(__tuple_arg_1))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(__tuple_arg_1)
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        NameExpr(__tuple_arg_1))
+      PassStmt:1())))
+
+[case testLambdaTupleArgListInPython2]
+lambda (x, y): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Args(
+        Var(__tuple_arg_1))
+      Block:1(
+        AssignmentStmt:1(
+          TupleExpr:1(
+            NameExpr(x)
+            NameExpr(y))
+          NameExpr(__tuple_arg_1))
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testLambdaSingletonTupleArgListInPython2]
+lambda (x,): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Args(
+        Var(__tuple_arg_1))
+      Block:1(
+        AssignmentStmt:1(
+          TupleExpr:1(
+            NameExpr(x))
+          NameExpr(__tuple_arg_1))
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testLambdaNoTupleArgListInPython2]
+lambda (x): z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Args(
+        Var(x))
+      Block:1(
+        ReturnStmt:1(
+          NameExpr(z))))))
+
+[case testInvalidExprInTupleArgListInPython2_1]
+def f(x, ()): pass
+[out]
+main:1: error: invalid syntax
+
+[case testInvalidExprInTupleArgListInPython2_2]
+def f(x, (y, x[1])): pass
+[out]
+main:1: error: invalid syntax
+
+[case testListLiteralAsTupleArgInPython2]
+def f(x, [x]): pass
+[out]
+main:1: error: invalid syntax
+
+[case testTupleArgAfterStarArgInPython2]
+def f(*a, (b, c)): pass
+[out]
+main:1: error: invalid syntax
+
+[case testTupleArgAfterStarStarArgInPython2]
+def f(**a, (b, c)): pass
+[out]
+main:1: error: invalid syntax
+
+[case testParenthesizedArgumentInPython2]
+def f(x, (y)): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      PassStmt:1())))
+
+[case testDuplicateNameInTupleArgList_python2]
+def f(a, (a, b)):
+    pass
+def g((x, (x, y))):
+    pass
+[out]
+main:1: error: duplicate argument 'a' in function definition
+main:3: error: duplicate argument 'x' in function definition
+
+[case testBackquotesInPython2]
+`1 + 2`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      OpExpr:1(
+        +
+        IntExpr(1)
+        IntExpr(2)))))
+
+[case testBackquoteSpecialCasesInPython2]
+`1, 2`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      TupleExpr:1(
+        IntExpr(1)
+        IntExpr(2)))))
diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test
new file mode 100644
index 0000000..4335fff
--- /dev/null
+++ b/test-data/unit/parse.test
@@ -0,0 +1,3386 @@
+-- Test cases for parser. Each test case consists of two sections.
+-- The first section contains [case NAME] followed by the input code, while
+-- the second section contains [out] followed by the output from the parser.
+--
+-- Lines starting with "--" in this file will be ignored, except for lines
+-- starting with "----" that are not ignored. The first two dashes of these
+-- lines are interpreted as escapes and removed.
+
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testExpressionStatement]
+1
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(1)))
+
+[case testAssignment]
+x = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testExpressionBasics]
+x = f(1, None)
+123 * (2 + x)
+"hello".lower()
+-1.23
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        IntExpr(1)
+        NameExpr(None))))
+  ExpressionStmt:2(
+    OpExpr:2(
+      *
+      IntExpr(123)
+      OpExpr:2(
+        +
+        IntExpr(2)
+        NameExpr(x))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      MemberExpr:3(
+        StrExpr(hello)
+        lower)
+      Args()))
+  ExpressionStmt:4(
+    UnaryExpr:4(
+      -
+      FloatExpr(1.23))))
+
+[case testSingleQuotedStr]
+''
+'foo'
+'foo\
+bar'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar)))
+
+[case testDoubleQuotedStr]
+""
+"foo"
+"foo\
+bar"
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar)))
+
+[case testTripleQuotedStr]
+''''''
+'''foo'''
+'''foo\
+bar'''
+'''\nfoo
+bar'''
+'''fo''bar'''
+""""""
+"""foo"""
+"""foo\
+bar"""
+"""\nfoo
+bar"""
+"""fo""bar"""
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr())
+  ExpressionStmt:2(
+    StrExpr(foo))
+  ExpressionStmt:3(
+    StrExpr(foobar))
+  ExpressionStmt:5(
+    StrExpr(\u000afoo\u000abar))
+  ExpressionStmt:6(
+    StrExpr(fo''bar))
+  ExpressionStmt:7(
+    StrExpr())
+  ExpressionStmt:8(
+    StrExpr(foo))
+  ExpressionStmt:9(
+    StrExpr(foobar))
+  ExpressionStmt:11(
+    StrExpr(\u000afoo\u000abar))
+  ExpressionStmt:12(
+    StrExpr(fo""bar)))
+
+[case testRawStr]
+r'x\n\''
+r"x\n\""
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(x\n\'))
+  ExpressionStmt:2(
+    StrExpr(x\n\")))
+--" fix syntax highlight
+
+[case testBytes]
+b'foo'
+b"foo\
+bar"
+br'x\n\''
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BytesExpr(foo))
+  ExpressionStmt:2(
+    BytesExpr(foobar))
+  ExpressionStmt:3(
+    BytesExpr(x\\n\\')))
+
+[case testEscapesInStrings]
+'\r\n\t\x2f\u123f'
+b'\r\n\t\x2f\u123f'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(\u000d\u000a\u0009/\u123f))
+  ExpressionStmt:2(
+    BytesExpr(\r\n\t/\\\u123f)))
+-- Note \\u in the b'...' case (\u sequence not translated)
+
+[case testEscapedQuote]
+'\''
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(')))
+--'
+
+[case testOctalEscapes]
+'\0\1\177\1234'
+b'\1\476'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(\u0000\u0001\u007fS4))
+  ExpressionStmt:2(
+    BytesExpr(\x01>)))
+
+[case testUnicodeLiteralInPython3]
+u'foo'
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(foo)))
+
+[case testArrays]
+a = []
+a = [1, 2]
+a[[1]] = a[2]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListExpr:1())
+  AssignmentStmt:2(
+    NameExpr(a)
+    ListExpr:2(
+      IntExpr(1)
+      IntExpr(2)))
+  AssignmentStmt:3(
+    IndexExpr:3(
+      NameExpr(a)
+      ListExpr:3(
+        IntExpr(1)))
+    IndexExpr:3(
+      NameExpr(a)
+      IntExpr(2))))
+
+[case testTuples]
+()
+(1,)
+(1, foo)
+a, b = 1, (2, 3)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1())
+  ExpressionStmt:2(
+    TupleExpr:2(
+      IntExpr(1)))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      IntExpr(1)
+      NameExpr(foo)))
+  AssignmentStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      NameExpr(b))
+    TupleExpr:4(
+      IntExpr(1)
+      TupleExpr:4(
+        IntExpr(2)
+        IntExpr(3)))))
+
+[case testSimpleFunction]
+def main():
+  1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    main
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))))
+
+[case testPass]
+def f():
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      PassStmt:2())))
+
+[case testIf]
+if 1:
+    2
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))))
+
+[case testIfElse]
+if 1:
+    2
+else:
+    3
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))
+    Else(
+      ExpressionStmt:4(
+        IntExpr(3)))))
+
+[case testIfElif]
+if 1:
+    2
+elif 3:
+    4
+elif 5:
+    6
+else:
+    7
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:2(
+        IntExpr(2)))
+    Else(
+      IfStmt:3(
+        If(
+          IntExpr(3))
+        Then(
+          ExpressionStmt:4(
+            IntExpr(4)))
+        Else(
+          IfStmt:5(
+            If(
+              IntExpr(5))
+            Then(
+              ExpressionStmt:6(
+                IntExpr(6)))
+            Else(
+              ExpressionStmt:8(
+                IntExpr(7)))))))))
+
+[case testWhile]
+while 1:
+    pass
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      PassStmt:2())))
+
+[case testReturn]
+def f():
+    return 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ReturnStmt:2(
+        IntExpr(1)))))
+
+
+[case testReturnWithoutValue]
+def f():
+    return
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ReturnStmt:2())))
+
+[case testBreak]
+while 1:
+    break
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      BreakStmt:2())))
+
+[case testLargeBlock]
+if 1:
+    x = 1
+    while 2:
+        pass
+    y = 2
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(1))
+      WhileStmt:3(
+        IntExpr(2)
+        Block:3(
+          PassStmt:4()))
+      AssignmentStmt:5(
+        NameExpr(y)
+        IntExpr(2)))))
+
+[case testSimpleClass]
+class A:
+    def f(self):
+        pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        PassStmt:3()))))
+
+[case testGlobalVarWithType]
+x = 0 # type: int
+y = False # type: bool
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(0)
+    int?)
+  AssignmentStmt:2(
+    NameExpr(y)
+    NameExpr(False)
+    bool?))
+
+[case testLocalVarWithType]
+def f():
+  x = 0 # type: int
+  y = False # type: bool
+  a = None # type: Any
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(0)
+        int?)
+      AssignmentStmt:3(
+        NameExpr(y)
+        NameExpr(False)
+        bool?)
+      AssignmentStmt:4(
+        NameExpr(a)
+        NameExpr(None)
+        Any?))))
+
+[case testFunctionDefWithType]
+def f(y: str) -> int:
+  return
+class A:
+  def f(self, a: int, b: Any) -> x:
+    pass
+  def g(self) -> Any:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(y))
+    def (y: str?) -> int?
+    Block:1(
+      ReturnStmt:2()))
+  ClassDef:3(
+    A
+    FuncDef:4(
+      f
+      Args(
+        Var(self)
+        Var(a)
+        Var(b))
+      def (self: Any, a: int?, b: Any?) -> x?
+      Block:4(
+        PassStmt:5()))
+    FuncDef:6(
+      g
+      Args(
+        Var(self))
+      def (self: Any) -> Any?
+      Block:6(
+        PassStmt:7()))))
+
+[case testFuncWithNoneReturn]
+def f() -> None:
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> None?
+    Block:1(
+      PassStmt:2())))
+
+[case testVarDefWithGenericType]
+x = None # type: List[str]
+y = None # type: Dict[int, Any]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    NameExpr(None)
+    List?[str?])
+  AssignmentStmt:2(
+    NameExpr(y)
+    NameExpr(None)
+    Dict?[int?, Any?]))
+
+[case testSignatureWithGenericTypes]
+def f(y: t[Any, x]) -> a[b[c], d]:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(y))
+    def (y: t?[Any?, x?]) -> a?[b?[c?], d?]
+    Block:1(
+      PassStmt:2())))
+
+[case testParsingExpressionsWithLessAndGreaterThan]
+# The operators < > can sometimes be confused with generic types.
+x = a < b > c
+f(x < b, y > c)
+a < b > 1
+x < b, y > 2
+(a < b > c)
+[out]
+MypyFile:1(
+  AssignmentStmt:2(
+    NameExpr(x)
+    ComparisonExpr:2(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c)))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        ComparisonExpr:3(
+          <
+          NameExpr(x)
+          NameExpr(b))
+        ComparisonExpr:3(
+          >
+          NameExpr(y)
+          NameExpr(c)))))
+  ExpressionStmt:4(
+    ComparisonExpr:4(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      IntExpr(1)))
+  ExpressionStmt:5(
+    TupleExpr:5(
+      ComparisonExpr:5(
+        <
+        NameExpr(x)
+        NameExpr(b))
+      ComparisonExpr:5(
+        >
+        NameExpr(y)
+        IntExpr(2))))
+  ExpressionStmt:6(
+    ComparisonExpr:6(
+      <
+      >
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c))))
+
+[case testLineContinuation]
+if (1 +
+    2):
+  pass
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      OpExpr:1(
+        +
+        IntExpr(1)
+        IntExpr(2)))
+    Then(
+      PassStmt:3())))
+
+[case testMultipleVarDef]
+x, y = z # type: int, a[c]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x)
+      NameExpr(y))
+    NameExpr(z)
+    Tuple[int?, a?[c?]]))
+
+[case testMultipleVarDef2]
+(xx, z, i) = 1 # type: (a[c], Any, int)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(xx)
+      NameExpr(z)
+      NameExpr(i))
+    IntExpr(1)
+    Tuple[a?[c?], Any?, int?]))
+
+[case testMultipleVarDef3]
+(xx, (z, i)) = 1 # type: (a[c], (Any, int))
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(xx)
+      TupleExpr:1(
+        NameExpr(z)
+        NameExpr(i)))
+    IntExpr(1)
+    Tuple[a?[c?], Tuple[Any?, int?]]))
+
+[case testAnnotateAssignmentViaSelf]
+class A:
+    def __init__(self):
+        self.x = 1 # type: int
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self)
+            x)
+          IntExpr(1)
+          int?)))))
+
+[case testCommentAfterTypeComment]
+x = 0 # type: int # bar!
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(0)
+    int?))
+
+[case testMultilineAssignmentAndAnnotations]
+(x,
+ y) = (1,
+      2) # type: foo, bar
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x)
+      NameExpr(y))
+    TupleExpr:2(
+      IntExpr(1)
+      IntExpr(2))
+    Tuple[foo?, bar?]))
+
+[case testWhitespaceAndCommentAnnotation]
+x = 1#type:int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)
+    int?))
+
+[case testWhitespaceAndCommentAnnotation2]
+x = 1#   type:   int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)
+    int?))
+
+[case testWhitespaceAndCommentAnnotation3]
+x = 1# type : int       # not recognized!
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testInvalidAnnotation]
+x=1 ##type: int
+y=1 #.type: int
+z=1 # Type: int
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(y)
+    IntExpr(1))
+  AssignmentStmt:3(
+    NameExpr(z)
+    IntExpr(1)))
+
+[case testEmptyClass]
+class C:
+  pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    C
+    PassStmt:2()))
+
+[case testOperatorPrecedence]
+a | b ^ c
+a & b << c
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      |
+      NameExpr(a)
+      OpExpr:1(
+        ^
+        NameExpr(b)
+        NameExpr(c))))
+  ExpressionStmt:2(
+    OpExpr:2(
+      &
+      NameExpr(a)
+      OpExpr:2(
+        <<
+        NameExpr(b)
+        NameExpr(c)))))
+
+[case testOperatorAssociativity]
+1 - 2 + 3
+1 << 2 << 3
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      +
+      OpExpr:1(
+        -
+        IntExpr(1)
+        IntExpr(2))
+      IntExpr(3)))
+  ExpressionStmt:2(
+    OpExpr:2(
+      <<
+      OpExpr:2(
+        <<
+        IntExpr(1)
+        IntExpr(2))
+      IntExpr(3))))
+
+[case testUnaryOperators]
+-2 * +3 * ~3 * 2
+~3**2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    OpExpr:1(
+      *
+      OpExpr:1(
+        *
+        OpExpr:1(
+          *
+          UnaryExpr:1(
+            -
+            IntExpr(2))
+          UnaryExpr:1(
+            +
+            IntExpr(3)))
+        UnaryExpr:1(
+          ~
+          IntExpr(3)))
+      IntExpr(2)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      ~
+      OpExpr:2(
+        **
+        IntExpr(3)
+        IntExpr(2)))))
+
+[case testSingleLineBodies]
+if 1: pass
+while 1: pass
+def f(): pass
+def g() -> int: return 1
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      PassStmt:1()))
+  WhileStmt:2(
+    IntExpr(1)
+    Block:2(
+      PassStmt:2()))
+  FuncDef:3(
+    f
+    Block:3(
+      PassStmt:3()))
+  FuncDef:4(
+    g
+    def () -> int?
+    Block:4(
+      ReturnStmt:4(
+        IntExpr(1)))))
+
+[case testForStatement]
+for x in y:
+  pass
+for x, (y, w) in z:
+  1
+for [x, (y, w)] in z:
+  1
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Block:1(
+      PassStmt:2()))
+  ForStmt:3(
+    TupleExpr:3(
+      NameExpr(x)
+      TupleExpr:3(
+        NameExpr(y)
+        NameExpr(w)))
+    NameExpr(z)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(1))))
+  ForStmt:5(
+    ListExpr:5(
+      NameExpr(x)
+      TupleExpr:5(
+        NameExpr(y)
+        NameExpr(w)))
+    NameExpr(z)
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(1)))))
+
+[case testGlobalDecl]
+global x
+def f():
+  global x, y
+[out]
+MypyFile:1(
+  GlobalDecl:1(
+    x)
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x
+        y))))
+
+[case testNonlocalDecl]
+def f():
+  def g():
+    nonlocal x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          NonlocalDecl:3(
+            x
+            y))))))
+
+[case testRaiseStatement]
+raise foo
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    NameExpr(foo)))
+
+[case testRaiseWithoutArg]
+try:
+  pass
+except:
+  raise
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Block:3(
+      RaiseStmt:4())))
+
+[case testRaiseFrom]
+raise e from x
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    NameExpr(e)
+    NameExpr(x)))
+
+[case testBaseclasses]
+class A(B):
+  pass
+class A(B[T], C[Any, d[x]]):
+  pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    BaseTypeExpr(
+      NameExpr(B))
+    PassStmt:2())
+  ClassDef:3(
+    A
+    BaseTypeExpr(
+      IndexExpr:3(
+        NameExpr(B)
+        NameExpr(T))
+      IndexExpr:3(
+        NameExpr(C)
+        TupleExpr:3(
+          NameExpr(Any)
+          IndexExpr:3(
+            NameExpr(d)
+            NameExpr(x)))))
+    PassStmt:4()))
+
+[case testIsNot]
+x is not y
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      is not
+      NameExpr(x)
+      NameExpr(y))))
+
+[case testNotIn]
+x not in y
+not x not in y
+x not in y | z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ComparisonExpr:1(
+      not in
+      NameExpr(x)
+      NameExpr(y)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      not
+      ComparisonExpr:2(
+        not in
+        NameExpr(x)
+        NameExpr(y))))
+  ExpressionStmt:3(
+    ComparisonExpr:3(
+      not in
+      NameExpr(x)
+      OpExpr:3(
+        |
+        NameExpr(y)
+        NameExpr(z)))))
+
+[case testNotAsBinaryOp]
+x not y # E: invalid syntax
+[out]
+
+[case testNotIs]
+x not is y # E: invalid syntax
+[out]
+
+[case testBinaryNegAsBinaryOp]
+1 ~ 2 # E: invalid syntax
+[out]
+
+[case testDictionaryExpression]
+{}
+{1:x}
+{1:x, 2 or 1:2 and 3}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    DictExpr:1())
+  ExpressionStmt:2(
+    DictExpr:2(
+      IntExpr(1)
+      NameExpr(x)))
+  ExpressionStmt:3(
+    DictExpr:3(
+      IntExpr(1)
+      NameExpr(x)
+      OpExpr:3(
+        or
+        IntExpr(2)
+        IntExpr(1))
+      OpExpr:3(
+        and
+        IntExpr(2)
+        IntExpr(3)))))
+
+[case testImport]
+import x
+import y.z.foo, __foo__.bar
+[out]
+MypyFile:1(
+  Import:1(x)
+  Import:2(y.z.foo, __foo__.bar))
+
+[case testVariableTypeWithQualifiedName]
+x = None # type: x.y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    NameExpr(None)
+    x.y?))
+
+[case testTypeInSignatureWithQualifiedName]
+def f() -> x.y[a.b.c]: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> x.y?[a.b.c?]
+    Block:1(
+      PassStmt:1())))
+
+[case testImportFrom]
+from m import x
+from m.n import x, y, z
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x])
+  ImportFrom:2(m.n, [x, y, z]))
+
+[case testImportFromAs]
+from m import x as y
+from x import y, z as a, c as c
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x : y])
+  ImportFrom:2(x, [y, z : a, c : c]))
+
+[case testImportStar]
+from x import *
+[out]
+MypyFile:1(
+  ImportAll:1(x))
+
+[case testImportsInDifferentPlaces]
+1
+import x
+def f():
+  from x import y
+  from z import *
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IntExpr(1))
+  Import:2(x)
+  FuncDef:3(
+    f
+    Block:3(
+      ImportFrom:4(x, [y])
+      ImportAll:5(z))))
+
+[case testImportWithExtraComma]
+from x import (y, z,)
+[out]
+MypyFile:1(
+  ImportFrom:1(x, [y, z]))
+
+[case testDefaultArgs]
+def f(x=1):
+  pass
+def g(x, y=1+2, z=(1, 2)):
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(x)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    g
+    Args(
+      Var(x)
+      Var(y)
+      Var(z))
+    Init(
+      AssignmentStmt:3(
+        NameExpr(y)
+        OpExpr:3(
+          +
+          IntExpr(1)
+          IntExpr(2)))
+      AssignmentStmt:3(
+        NameExpr(z)
+        TupleExpr:3(
+          IntExpr(1)
+          IntExpr(2))))
+    Block:3(
+      PassStmt:4())))
+
+[case testTryFinally]
+try:
+  1
+finally:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    Finally(
+      ExpressionStmt:4(
+        IntExpr(2)))))
+
+[case testTry]
+try:
+  1
+except x:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(2)))))
+
+[case testComplexTry]
+try:
+  1
+except x as y:
+  2
+except x.y:
+  3
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    NameExpr(x)
+    NameExpr(y)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(2)))
+    MemberExpr:5(
+      NameExpr(x)
+      y)
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(3)))))
+
+[case testGeneratorExpression]
+(x for y in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      NameExpr(y)
+      NameExpr(z))))
+
+[case testGeneratorExpressionNested]
+(x for y, (p, q) in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      TupleExpr:1(
+        NameExpr(y)
+        TupleExpr:1(
+          NameExpr(p)
+          NameExpr(q)))
+      NameExpr(z))))
+
+[case testListComprehension]
+x=[x for y in z]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)))))
+
+[case testComplexListComprehension]
+x=[(x, y) for y, z in (1, 2)]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        TupleExpr:1(
+          NameExpr(x)
+          NameExpr(y))
+        TupleExpr:1(
+          NameExpr(y)
+          NameExpr(z))
+        TupleExpr:1(
+          IntExpr(1)
+          IntExpr(2))))))
+
+[case testListComprehension2]
+([x + 1 for x in a])
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        OpExpr:1(
+          +
+          NameExpr(x)
+          IntExpr(1))
+        NameExpr(x)
+        NameExpr(a)))))
+
+[case testSlices]
+x[1:2]
+x[:1]
+x[1:]
+x[:]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        IntExpr(1))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        <empty>)))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        <empty>))))
+
+[case testSliceWithStride]
+x[1:2:3]
+x[1::2]
+x[:1:2]
+x[::2]
+x[1:2:]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2)
+        IntExpr(3))))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        <empty>
+        IntExpr(2))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x)
+      SliceExpr:-1(
+        <empty>
+        <empty>
+        IntExpr(2))))
+  ExpressionStmt:5(
+    IndexExpr:5(
+      NameExpr(x)
+      SliceExpr:-1(
+        IntExpr(1)
+        IntExpr(2)))))
+
+[case testYield]
+def f():
+    yield x + 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2(
+          OpExpr:2(
+            +
+            NameExpr(x)
+            IntExpr(1)))))))
+
+[case testYieldFrom]
+def f():
+    yield from h()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldFromExpr:2(
+          CallExpr:2(
+            NameExpr(h)
+            Args()))))))
+
+[case testYieldFromAssignment]
+def f():
+    a = yield from h()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(a)
+        YieldFromExpr:2(
+          CallExpr:2(
+            NameExpr(h)
+            Args()))))))
+
+[case testDel]
+del x
+del x[0], y[1]
+[out]
+MypyFile:1(
+  DelStmt:1(
+    NameExpr(x))
+  DelStmt:2(
+    TupleExpr:2(
+      IndexExpr:2(
+        NameExpr(x)
+        IntExpr(0))
+      IndexExpr:2(
+        NameExpr(y)
+        IntExpr(1)))))
+
+[case testExtraCommas]
+1, 2,
++[1, 2,]
+f(1,)
+{1:2,}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(1)
+      IntExpr(2)))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      +
+      ListExpr:2(
+        IntExpr(1)
+        IntExpr(2))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        IntExpr(1))))
+  ExpressionStmt:4(
+    DictExpr:4(
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testExtraCommaInFunc]
+def f(x,):
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      PassStmt:2())))
+
+[case testLambda]
+lambda: 1
+lambda x: y + 1
+lambda x, y: 1
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Block:1(
+        ReturnStmt:1(
+          IntExpr(1)))))
+  ExpressionStmt:2(
+    FuncExpr:2(
+      Args(
+        Var(x))
+      Block:2(
+        ReturnStmt:2(
+          OpExpr:2(
+            +
+            NameExpr(y)
+            IntExpr(1))))))
+  ExpressionStmt:3(
+    FuncExpr:3(
+      Args(
+        Var(x)
+        Var(y))
+      Block:3(
+        ReturnStmt:3(
+          IntExpr(1))))))
+
+[case testComplexLambda]
+lambda x=2: x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Args(
+        Var(x))
+      Init(
+        AssignmentStmt:1(
+          NameExpr(x)
+          IntExpr(2)))
+      Block:1(
+        ReturnStmt:1(
+          NameExpr(x))))))
+
+[case testLambdaPrecedence]
+lambda x: 1, 2
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      FuncExpr:1(
+        Args(
+          Var(x))
+        Block:1(
+          ReturnStmt:1(
+            IntExpr(1))))
+      IntExpr(2))))
+
+[case testForIndicesInParens]
+for (i, j) in x:
+  pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i)
+      NameExpr(j))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testForAndTrailingCommaAfterIndexVar]
+for i, in x:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testListComprehensionAndTrailingCommaAfterIndexVar]
+x = [a for b, in c]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(a)
+        TupleExpr:1(
+          NameExpr(b))
+        NameExpr(c)))))
+
+[case testForAndTrailingCommaAfterIndexVars]
+for i, j, in x:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(i)
+      NameExpr(j))
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())))
+
+[case testGeneratorWithCondition]
+(x for y in z if 0)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      NameExpr(y)
+      NameExpr(z)
+      IntExpr(0))))
+
+[case testListComprehensionWithCondition]
+raise [x for y in z if 0]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        IntExpr(0)))))
+
+[case testListComprehensionWithConditions]
+raise [x for y in z if 0 if 1]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        IntExpr(0)
+        IntExpr(1)))))
+
+[case testListComprehensionWithCrazyConditions]
+raise [x for y in z if (1 if 2 else 3) if 1]
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    ListComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(x)
+        NameExpr(y)
+        NameExpr(z)
+        ConditionalExpr:1(
+          Condition(
+            IntExpr(2))
+          IntExpr(1)
+          IntExpr(3))
+        IntExpr(1)))))
+
+[case testDictionaryComprehension]
+a = {x: y for x, y in xys}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    DictionaryComprehension:1(
+      NameExpr(x)
+      NameExpr(y)
+      TupleExpr:1(
+        NameExpr(x)
+        NameExpr(y))
+      NameExpr(xys))))
+
+[case testDictionaryComprehensionComplex]
+a = {x: y for x, y in xys for p, q in pqs if c}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    DictionaryComprehension:1(
+      NameExpr(x)
+      NameExpr(y)
+      TupleExpr:1(
+        NameExpr(x)
+        NameExpr(y))
+      TupleExpr:1(
+        NameExpr(p)
+        NameExpr(q))
+      NameExpr(xys)
+      NameExpr(pqs)
+      NameExpr(c))))
+
+[case testSetComprehension]
+a = {i for i in l}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    SetComprehension:1(
+      GeneratorExpr:1(
+        NameExpr(i)
+        NameExpr(i)
+        NameExpr(l)))))
+
+[case testSetComprehensionComplex]
+a = {x + p for x in xys for p in pqs if c}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    SetComprehension:1(
+      GeneratorExpr:1(
+        OpExpr:1(
+          +
+          NameExpr(x)
+          NameExpr(p))
+        NameExpr(x)
+        NameExpr(p)
+        NameExpr(xys)
+        NameExpr(pqs)
+        NameExpr(c)))))
+
+[case testWithStatement]
+with open('foo') as f:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      CallExpr:1(
+        NameExpr(open)
+        Args(
+          StrExpr(foo))))
+    Target(
+      NameExpr(f))
+    Block:1(
+      PassStmt:2())))
+
+[case testWithStatementWithoutTarget]
+with foo:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(foo))
+    Block:1(
+      PassStmt:2())))
+
+[case testHexOctBinLiterals]
+0xa, 0Xaf, 0o7, 0O12, 0b1, 0B101
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(10)
+      IntExpr(175)
+      IntExpr(7)
+      IntExpr(10)
+      IntExpr(1)
+      IntExpr(5))))
+
+[case testImportFromWithParens]
+from x import (y)
+from x import (y,
+               z)
+[out]
+MypyFile:1(
+  ImportFrom:1(x, [y])
+  ImportFrom:2(x, [y, z]))
+
+[case testContinueStmt]
+while 1:
+  continue
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      ContinueStmt:2())))
+
+[case testStrLiteralConcatenate]
+'f' 'bar'
+('x'
+ 'y'
+ 'z')
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StrExpr(fbar))
+  ExpressionStmt:2(
+    StrExpr(xyz)))
+
+[case testCatchAllExcept]
+try:
+  1
+except:
+  pass
+try:
+  1
+except x:
+  pass
+except:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      ExpressionStmt:2(
+        IntExpr(1)))
+    Block:3(
+      PassStmt:4()))
+  TryStmt:5(
+    Block:5(
+      ExpressionStmt:6(
+        IntExpr(1)))
+    NameExpr(x)
+    Block:7(
+      PassStmt:8())
+    Block:9(
+      ExpressionStmt:10(
+        IntExpr(2)))))
+
+[case testTryElse]
+try:
+  pass
+except x:
+  1
+else:
+  2
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        IntExpr(1)))
+    Else(
+      ExpressionStmt:6(
+        IntExpr(2)))))
+
+[case testExceptWithMultipleTypes]
+try:
+  pass
+except (x, y):
+  pass
+except (a, b, c) as e:
+  pass
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    TupleExpr:3(
+      NameExpr(x)
+      NameExpr(y))
+    Block:3(
+      PassStmt:4())
+    TupleExpr:5(
+      NameExpr(a)
+      NameExpr(b)
+      NameExpr(c))
+    NameExpr(e)
+    Block:5(
+      PassStmt:6())))
+
+[case testNestedFunctions]
+def f():
+  def g():
+    pass
+def h() -> int:
+  def g() -> int:
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          PassStmt:3()))))
+  FuncDef:4(
+    h
+    def () -> int?
+    Block:4(
+      FuncDef:5(
+        g
+        def () -> int?
+        Block:5(
+          PassStmt:6())))))
+
+[case testStatementsAndDocStringsInClassBody]
+class A:
+  "doc string"
+  x = y
+  def f(self):
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ExpressionStmt:2(
+      StrExpr(doc string))
+    AssignmentStmt:3(
+      NameExpr(x)
+      NameExpr(y))
+    FuncDef:4(
+      f
+      Args(
+        Var(self))
+      Block:4(
+        PassStmt:5()))))
+
+[case testSingleLineClass]
+class a: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    a
+    PassStmt:1()))
+
+[case testDecorator]
+@property
+def f():
+  pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    NameExpr(property)
+    FuncDef:2(
+      f
+      Block:2(
+        PassStmt:3()))))
+
+[case testComplexDecorator]
+@foo(bar, 1)
+@zar
+def f() -> int:
+  pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    CallExpr:1(
+      NameExpr(foo)
+      Args(
+        NameExpr(bar)
+        IntExpr(1)))
+    NameExpr(zar)
+    FuncDef:3(
+      f
+      def () -> int?
+      Block:3(
+        PassStmt:4()))))
+
+[case testKeywordArgInCall]
+f(x=1)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      KwArgs(
+        x
+        IntExpr(1)))))
+
+[case testComplexKeywordArgs]
+f(x, y=1 or 2, z=y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      KwArgs(
+        y
+        OpExpr:1(
+          or
+          IntExpr(1)
+          IntExpr(2)))
+      KwArgs(
+        z
+        NameExpr(y)))))
+
+[case testChainedAssignment]
+x = z = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x)
+      NameExpr(z))
+    IntExpr(1)))
+
+[case testVarArgs]
+def f(x, *a): pass
+f(1, *2)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1()))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f)
+      Args(
+        IntExpr(1)
+        IntExpr(2))
+      VarArg)))
+
+[case testVarArgWithType]
+def f(x: str, *a: int): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: str?, *a: int?) -> Any
+    VarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testDictVarArgs]
+def f(x, **a): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    DictVarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testBothVarArgs]
+def f(x, *a, **b): pass
+def g(*a, **b): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(a))
+    DictVarArg(
+      Var(b))
+    Block:1(
+      PassStmt:1()))
+  FuncDef:2(
+    g
+    VarArg(
+      Var(a))
+    DictVarArg(
+      Var(b))
+    Block:2(
+      PassStmt:2())))
+
+[case testDictVarArgsWithType]
+def f(x: X, **a: A) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, **a: A?) -> None?
+    DictVarArg(
+      Var(a))
+    Block:1(
+      PassStmt:1())))
+
+[case testCallDictVarArgs]
+f(**x)
+f(x, **y)
+f(*x, **y)
+f(x, *y, **z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      DictVarArg(
+        NameExpr(x))))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      DictVarArg(
+        NameExpr(y))))
+  ExpressionStmt:3(
+    CallExpr:3(
+      NameExpr(f)
+      Args(
+        NameExpr(x))
+      VarArg
+      DictVarArg(
+        NameExpr(y))))
+  ExpressionStmt:4(
+    CallExpr:4(
+      NameExpr(f)
+      Args(
+        NameExpr(x)
+        NameExpr(y))
+      VarArg
+      DictVarArg(
+        NameExpr(z)))))
+
+[case testAssert]
+assert x == y
+[out]
+MypyFile:1(
+  AssertStmt:1(
+    ComparisonExpr:1(
+      ==
+      NameExpr(x)
+      NameExpr(y))))
+
+[case testYieldWithoutExpressions]
+def f():
+  yield
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2()))))
+
+[case testConditionalExpression]
+x if y else z
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ConditionalExpr:1(
+      Condition(
+        NameExpr(y))
+      NameExpr(x)
+      NameExpr(z))))
+
+[case testConditionalExpressionInListComprehension]
+a = [x if y else z for a in b]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(y))
+          NameExpr(x)
+          NameExpr(z))
+        NameExpr(a)
+        NameExpr(b)))))
+
+[case testConditionalExpressionInTuple]
+1 if 2 else 3, 4
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      ConditionalExpr:1(
+        Condition(
+          IntExpr(2))
+        IntExpr(1)
+        IntExpr(3))
+      IntExpr(4))))
+
+[case testSetLiteral]
+{x or y}
+{1, 2}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetExpr:1(
+      OpExpr:1(
+        or
+        NameExpr(x)
+        NameExpr(y))))
+  ExpressionStmt:2(
+    SetExpr:2(
+      IntExpr(1)
+      IntExpr(2))))
+
+[case testSetLiteralWithExtraComma]
+{x,}
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetExpr:1(
+      NameExpr(x))))
+
+[case testImportAs]
+import x as y
+import x, z as y, a.b as c, d as d
+[out]
+MypyFile:1(
+  Import:1(x : y)
+  Import:2(x, z : y, a.b : c, d : d))
+
+[case testForAndElse]
+for x in y:
+  pass
+else:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    NameExpr(y)
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(x)))))
+
+[case testWhileAndElse]
+while x:
+  pass
+else:
+  y
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    NameExpr(x)
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(y)))))
+
+[case testWithAndMultipleOperands]
+with x as y, a as b:
+  pass
+with x(), y():
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(x))
+    Target(
+      NameExpr(y))
+    Expr(
+      NameExpr(a))
+    Target(
+      NameExpr(b))
+    Block:1(
+      PassStmt:2()))
+  WithStmt:3(
+    Expr(
+      CallExpr:3(
+        NameExpr(x)
+        Args()))
+    Expr(
+      CallExpr:3(
+        NameExpr(y)
+        Args()))
+    Block:3(
+      PassStmt:4())))
+
+[case testOperatorAssignment]
+x += 1
+x -= 1
+x *= 1
+x /= 1
+x //= 1
+x %= 1
+x **= 1
+x |= 1
+x &= 1
+x ^= 1
+x >>= 1
+x <<= 1
+[out]
+MypyFile:1(
+  OperatorAssignmentStmt:1(
+    +
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:2(
+    -
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:3(
+    *
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:4(
+    /
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:5(
+    //
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:6(
+    %
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:7(
+    **
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:8(
+    |
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:9(
+    &
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:10(
+    ^
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:11(
+    >>
+    NameExpr(x)
+    IntExpr(1))
+  OperatorAssignmentStmt:12(
+    <<
+    NameExpr(x)
+    IntExpr(1)))
+
+[case testNestedClasses]
+class A:
+  class B:
+    pass
+  class C:
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:3())
+    ClassDef:4(
+      C
+      PassStmt:5())))
+
+[case testTryWithExceptAndFinally]
+try:
+  pass
+except x:
+  x
+finally:
+  y
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(x)
+    Block:3(
+      ExpressionStmt:4(
+        NameExpr(x)))
+    Finally(
+      ExpressionStmt:6(
+        NameExpr(y)))))
+
+[case testBareAsteriskInFuncDef]
+def f(x, *, y=1): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskInFuncDefWithSignature]
+def f(x: A, *, y: B = 1) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    def (x: A?, *, y: B? =) -> None?
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskNamedDefault]
+def f(*, y: B = 1) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*, y: B? =) -> None?
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Block:1(
+      PassStmt:1())))
+
+[case testBareAsteriskNamedNoDefault]
+def f(*, y: B) -> None: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*, y: B?) -> None?
+    Block:1(
+      PassStmt:1())))
+
+[case testSuperExpr]
+super().x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SuperExpr:1(
+      x)))
+
+[case testKeywordAndDictArgs]
+f(x = y, **kwargs)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args()
+      KwArgs(
+        x
+        NameExpr(y))
+      DictVarArg(
+        NameExpr(kwargs)))))
+
+[case testSimpleFunctionType]
+f = None # type: Callable[[], None]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList >, None?]))
+
+[case testFunctionTypeWithArgument]
+f = None # type: Callable[[str], int]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList str?>, int?]))
+
+[case testFunctionTypeWithTwoArguments]
+f = None # type: Callable[[a[b], x.y], List[int]]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(f)
+    NameExpr(None)
+    Callable?[<TypeList a?[b?], x.y?>, List?[int?]]))
+
+[case testFunctionTypeWithExtraComma]
+def f(x: Callable[[str,], int]): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: Callable?[<TypeList str?>, int?]) -> Any
+    Block:1(
+      PassStmt:1())))
+
+[case testSimpleStringLiteralType]
+def f() -> 'A': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:1())))
+
+[case testGenericStringLiteralType]
+def f() -> 'A[B, C]': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[B?, C?]
+    Block:1(
+      PassStmt:1())))
+
+[case testPartialStringLiteralType]
+def f() -> A['B', C]: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[B?, C?]
+    Block:1(
+      PassStmt:1())))
+
+[case testWhitespaceInStringLiteralType]
+def f() -> '  A  [  X  ]  ': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?[X?]
+    Block:1(
+      PassStmt:1())))
+
+[case testEscapeInStringLiteralType]
+def f() -> '\x41': pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:1())))
+
+[case testMetaclass]
+class Foo(metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testQualifiedMetaclass]
+class Foo(metaclass=foo.Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(foo.Bar)
+    PassStmt:1()))
+
+[case testBaseAndMetaclass]
+class Foo(foo.bar[x], metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    BaseTypeExpr(
+      IndexExpr:1(
+        MemberExpr:1(
+          NameExpr(foo)
+          bar)
+        NameExpr(x)))
+    PassStmt:1()))
+
+[case testClassKeywordArgs]
+class Foo(_root=None): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    PassStmt:1()))
+
+[case testClassKeywordArgsBeforeMeta]
+class Foo(_root=None, metaclass=Bar): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testClassKeywordArgsAfterMeta]
+class Foo(metaclass=Bar, _root=None): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    Foo
+    Metaclass(Bar)
+    PassStmt:1()))
+
+[case testNamesThatAreNoLongerKeywords]
+any = interface
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(any)
+    NameExpr(interface)))
+
+[case testFunctionOverload]
+@foo
+def f() -> x: pass
+@foo
+def f() -> y: pass
+[out]
+MypyFile:1(
+  OverloadedFuncDef:1(
+    Decorator:1(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:2(
+        f
+        def () -> x?
+        Block:2(
+          PassStmt:2())))
+    Decorator:3(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:4(
+        f
+        def () -> y?
+        Block:4(
+          PassStmt:4())))))
+
+[case testFunctionOverloadAndOtherStatements]
+x
+@foo
+def f() -> x: pass
+@foo
+def f() -> y: pass
+x
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x))
+  OverloadedFuncDef:2(
+    Decorator:2(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:3(
+        f
+        def () -> x?
+        Block:3(
+          PassStmt:3())))
+    Decorator:4(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:5(
+        f
+        def () -> y?
+        Block:5(
+          PassStmt:5()))))
+  ExpressionStmt:6(
+    NameExpr(x)))
+
+[case testFunctionOverloadWithThreeVariants]
+@foo
+def f() -> x: pass
+@foo
+def f() -> y: pass
+@foo
+def f(y): pass
+[out]
+MypyFile:1(
+  OverloadedFuncDef:1(
+    Decorator:1(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:2(
+        f
+        def () -> x?
+        Block:2(
+          PassStmt:2())))
+    Decorator:3(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:4(
+        f
+        def () -> y?
+        Block:4(
+          PassStmt:4())))
+    Decorator:5(
+      Var(f)
+      NameExpr(foo)
+      FuncDef:6(
+        f
+        Args(
+          Var(y))
+        Block:6(
+          PassStmt:6())))))
+
+[case testDecoratorsThatAreNotOverloads]
+@foo
+def f() -> x: pass
+@foo
+def g() -> y: pass
+[out]
+MypyFile:1(
+  Decorator:1(
+    Var(f)
+    NameExpr(foo)
+    FuncDef:2(
+      f
+      def () -> x?
+      Block:2(
+        PassStmt:2())))
+  Decorator:3(
+    Var(g)
+    NameExpr(foo)
+    FuncDef:4(
+      g
+      def () -> y?
+      Block:4(
+        PassStmt:4()))))
+
+[case testFunctionOverloadWithinFunction]
+def f():
+    @foo
+    def g(): pass
+    @foo
+    def g() -> x: pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      OverloadedFuncDef:2(
+        Decorator:2(
+          Var(g)
+          NameExpr(foo)
+          FuncDef:3(
+            g
+            Block:3(
+              PassStmt:3())))
+        Decorator:4(
+          Var(g)
+          NameExpr(foo)
+          FuncDef:5(
+            g
+            def () -> x?
+            Block:5(
+              PassStmt:5())))))))
+
+[case testCommentFunctionAnnotation]
+def f(): # type: () -> A
+  pass
+def g(x): # type: (A) -> B
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def () -> A?
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    g
+    Args(
+      Var(x))
+    def (x: A?) -> B?
+    Block:3(
+      PassStmt:4())))
+
+[case testCommentMethodAnnotation]
+class A:
+  def f(self): # type: () -> A
+    pass
+  def g(xself, x): # type: (A) -> B
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      def (self: Any) -> A?
+      Block:2(
+        PassStmt:3()))
+    FuncDef:4(
+      g
+      Args(
+        Var(xself)
+        Var(x))
+      def (xself: Any, x: A?) -> B?
+      Block:4(
+        PassStmt:5()))))
+
+[case testCommentMethodAnnotationAndNestedFunction]
+class A:
+  def f(self): # type: () -> A
+    def g(x): # type: (A) -> B
+      pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      def (self: Any) -> A?
+      Block:2(
+        FuncDef:3(
+          g
+          Args(
+            Var(x))
+          def (x: A?) -> B?
+          Block:3(
+            PassStmt:4()))))))
+
+[case testCommentFunctionAnnotationOnSeparateLine]
+def f(x):
+  # type: (X) -> Y
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?) -> Y?
+    Block:1(
+      PassStmt:3())))
+
+[case testCommentFunctionAnnotationOnSeparateLine2]
+def f(x):
+
+     # type: (X) -> Y       # bar
+
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?) -> Y?
+    Block:1(
+      PassStmt:5())))
+
+[case testCommentFunctionAnnotationAndVarArg]
+def f(x, *y): # type: (X, *Y) -> Z
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, *y: Y?) -> Z?
+    VarArg(
+      Var(y))
+    Block:1(
+      PassStmt:2())))
+
+[case testCommentFunctionAnnotationAndAllVarArgs]
+def f(x, *y, **z): # type: (X, *Y, **Z) -> A
+  pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    def (x: X?, *y: Y?, **z: Z?) -> A?
+    VarArg(
+      Var(y))
+    DictVarArg(
+      Var(z))
+    Block:1(
+      PassStmt:2())))
+
+[case testClassDecorator]
+@foo
+class X: pass
+@foo(bar)
+@x.y
+class Z: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    X
+    Decorators(
+      NameExpr(foo))
+    PassStmt:2())
+  ClassDef:3(
+    Z
+    Decorators(
+      CallExpr:3(
+        NameExpr(foo)
+        Args(
+          NameExpr(bar)))
+      MemberExpr:4(
+        NameExpr(x)
+        y))
+    PassStmt:5()))
+
+[case testTrailingSemicolon]
+def x():
+    pass;
+
+def y():
+    pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    x
+    Block:1(
+      PassStmt:2()))
+  FuncDef:4(
+    y
+    Block:4(
+      PassStmt:5())))
+
+[case testEmptySuperClass]
+class A():
+    pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:2()))
+
+[case testStarExpression]
+*a
+*a, b
+a, *b
+a, (*x, y)
+a, (x, *y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StarExpr:1(
+      NameExpr(a)))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      StarExpr:2(
+        NameExpr(a))
+      NameExpr(b)))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      NameExpr(a)
+      StarExpr:3(
+        NameExpr(b))))
+  ExpressionStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      TupleExpr:4(
+        StarExpr:4(
+          NameExpr(x))
+        NameExpr(y))))
+  ExpressionStmt:5(
+    TupleExpr:5(
+      NameExpr(a)
+      TupleExpr:5(
+        NameExpr(x)
+        StarExpr:5(
+          NameExpr(y))))))
+
+[case testStarExpressionParenthesis]
+*(a)
+*(a,b)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    StarExpr:1(
+      NameExpr(a)))
+  ExpressionStmt:2(
+    StarExpr:2(
+      TupleExpr:2(
+        NameExpr(a)
+        NameExpr(b)))))
+
+[case testStarExpressionInFor]
+for *a in b:
+    pass
+
+for a, *b in c:
+    pass
+
+for *a, b in c:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    StarExpr:1(
+      NameExpr(a))
+    NameExpr(b)
+    Block:1(
+      PassStmt:2()))
+  ForStmt:4(
+    TupleExpr:4(
+      NameExpr(a)
+      StarExpr:4(
+        NameExpr(b)))
+    NameExpr(c)
+    Block:4(
+      PassStmt:5()))
+  ForStmt:7(
+    TupleExpr:7(
+      StarExpr:7(
+        NameExpr(a))
+      NameExpr(b))
+    NameExpr(c)
+    Block:7(
+      PassStmt:8())))
+
+[case testStarExprInGeneratorExpr]
+(x for y, *p in z)
+(x for *p, y in z)
+(x for y, *p, q in z)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    GeneratorExpr:1(
+      NameExpr(x)
+      TupleExpr:1(
+        NameExpr(y)
+        StarExpr:1(
+          NameExpr(p)))
+      NameExpr(z)))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x)
+      TupleExpr:2(
+        StarExpr:2(
+          NameExpr(p))
+        NameExpr(y))
+      NameExpr(z)))
+  ExpressionStmt:3(
+    GeneratorExpr:3(
+      NameExpr(x)
+      TupleExpr:3(
+        NameExpr(y)
+        StarExpr:3(
+          NameExpr(p))
+        NameExpr(q))
+      NameExpr(z))))
+
+[case testParseNamedtupleBaseclass]
+class A(namedtuple('x', ['y'])): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    BaseTypeExpr(
+      CallExpr:1(
+        NameExpr(namedtuple)
+        Args(
+          StrExpr(x)
+          ListExpr:1(
+            StrExpr(y)))))
+    PassStmt:1()))
+
+[case testEllipsis]
+...
+a[1,...,2]
+....__class__
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    Ellipsis)
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(a)
+      TupleExpr:2(
+        IntExpr(1)
+        Ellipsis
+        IntExpr(2))))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      Ellipsis
+      __class__)))
+
+[case testFunctionWithManyKindsOfArgs]
+def f(x, *args,  y=None, **kw): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    MaxPos(1)
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y)
+        NameExpr(None)))
+    VarArg(
+      Var(args))
+    DictVarArg(
+      Var(kw))
+    Block:1(
+      PassStmt:1())))
+
+[case testIfWithSemicolons]
+if 1: a; b
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      ExpressionStmt:1(
+        NameExpr(a))
+      ExpressionStmt:1(
+        NameExpr(b)))))
+
+[case testIfWithSemicolonsNested]
+while 2:
+    if 1: a; b
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(2)
+    Block:1(
+      IfStmt:2(
+        If(
+          IntExpr(1))
+        Then(
+          ExpressionStmt:2(
+            NameExpr(a))
+          ExpressionStmt:2(
+            NameExpr(b)))))))
+
+[case testIfElseWithSemicolons]
+if 1: global x; y = 1
+else: x = 1; return 3
+4
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      GlobalDecl:1(
+        x)
+      AssignmentStmt:1(
+        NameExpr(y)
+        IntExpr(1)))
+    Else(
+      AssignmentStmt:2(
+        NameExpr(x)
+        IntExpr(1))
+      ReturnStmt:2(
+        IntExpr(3))))
+  ExpressionStmt:3(
+    IntExpr(4)))
+
+[case testIfElseWithSemicolonsNested]
+while 2:
+    if 1: global x; y = 1
+    else: x = 1; return 3
+4
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(2)
+    Block:1(
+      IfStmt:2(
+        If(
+          IntExpr(1))
+        Then(
+          GlobalDecl:2(
+            x)
+          AssignmentStmt:2(
+            NameExpr(y)
+            IntExpr(1)))
+        Else(
+          AssignmentStmt:3(
+            NameExpr(x)
+            IntExpr(1))
+          ReturnStmt:3(
+            IntExpr(3))))))
+  ExpressionStmt:4(
+    IntExpr(4)))
+
+[case testKeywordArgumentAfterStarArgumentInCall]
+f(x=1, *y)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    CallExpr:1(
+      NameExpr(f)
+      Args(
+        NameExpr(y))
+      VarArg
+      KwArgs(
+        x
+        IntExpr(1)))))
+
+[case testConditionalExpressionInSetComprehension]
+{ 1 if x else 2 for x in y }
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    SetComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(x))
+          IntExpr(1)
+          IntExpr(2))
+        NameExpr(x)
+        NameExpr(y)))))
+
+[case testConditionalExpressionInListComprehension]
+a = [ 1 if x else 2 for x in y ]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a)
+    ListComprehension:1(
+      GeneratorExpr:1(
+        ConditionalExpr:1(
+          Condition(
+            NameExpr(x))
+          IntExpr(1)
+          IntExpr(2))
+        NameExpr(x)
+        NameExpr(y)))))
+
+[case testComplexWithLvalue]
+with x as y.z: pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(x))
+    Target(
+      MemberExpr:1(
+        NameExpr(y)
+        z))
+    Block:1(
+      PassStmt:1())))
+
+[case testRelativeImportWithEllipsis]
+from ... import x
+[out]
+MypyFile:1(
+  ImportFrom:1(..., [x]))
+
+[case testRelativeImportWithEllipsis2]
+from .... import x
+[out]
+MypyFile:1(
+  ImportFrom:1(...., [x]))
+
+[case testParseExtendedSlicing]
+a[:, :]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          <empty>
+          <empty>)
+        SliceExpr:-1(
+          <empty>
+          <empty>)))))
+
+[case testParseExtendedSlicing2]
+a[1:2:, :,]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          IntExpr(1)
+          IntExpr(2))
+        SliceExpr:-1(
+          <empty>
+          <empty>)))))
+
+[case testParseExtendedSlicing3]
+a[1:2:3, ..., 1]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    IndexExpr:1(
+      NameExpr(a)
+      TupleExpr:-1(
+        SliceExpr:-1(
+          IntExpr(1)
+          IntExpr(2)
+          IntExpr(3))
+        Ellipsis
+        IntExpr(1)))))
+
+[case testParseIfExprInDictExpr]
+test =  { 'spam': 'eggs' if True else 'bacon' }
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(test)
+    DictExpr:1(
+      StrExpr(spam)
+      ConditionalExpr:1(
+        Condition(
+          NameExpr(True))
+        StrExpr(eggs)
+        StrExpr(bacon)))))
+
+[case testIgnoreLine]
+import x # type: ignore
+[out]
+MypyFile:1(
+  Import:1(x)
+  IgnoredLines(1))
+
+[case testIgnore2Lines]
+x
+y # type: ignore
+z # type: ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x))
+  ExpressionStmt:2(
+    NameExpr(y))
+  ExpressionStmt:3(
+    NameExpr(z))
+  IgnoredLines(2, 3))
+
+[case testCommentedOutIgnoreAnnotation]
+y ## type: ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y)))
+
+[case testInvalidIgnoreAnnotations]
+y # type: ignored
+y # type: IGNORE
+y # type : ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y))
+  ExpressionStmt:2(
+    NameExpr(y))
+  ExpressionStmt:3(
+    NameExpr(y)))
+
+[case testSpaceInIgnoreAnnotations]
+y #  type:  ignore    # foo
+y #type:ignore
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(y))
+  ExpressionStmt:2(
+    NameExpr(y))
+  IgnoredLines(1, 2))
+
+[case testIgnoreAnnotationAndMultilineStatement]
+x = {
+  1: 2  # type: ignore
+}
+y = {   # type: ignore
+  1: 2
+}       # type: ignore
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x)
+    DictExpr:1(
+      IntExpr(1)
+      IntExpr(2)))
+  AssignmentStmt:4(
+    NameExpr(y)
+    DictExpr:4(
+      IntExpr(1)
+      IntExpr(2)))
+  IgnoredLines(2, 4, 6))
+
+[case testIgnoreAnnotationAndMultilineStatement2]
+from m import ( # type: ignore
+  x, y
+)
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x, y])
+  IgnoredLines(1))
+
+[case testYieldExpression]
+def f():
+    x = yield f()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x)
+        YieldExpr:2(
+          CallExpr:2(
+            NameExpr(f)
+            Args()))))))
+
+[case testForWithSingleItemTuple]
+for x in 1,: pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x)
+    TupleExpr:1(
+      IntExpr(1))
+    Block:1(
+      PassStmt:1())))
+
+[case testIsoLatinUnixEncoding]
+# coding: iso-latin-1-unix
+[out]
+MypyFile:1()
+
+[case testLatinUnixEncoding]
+# coding: latin-1-unix
+[out]
+MypyFile:1()
+
+[case testIsoLatinEncoding]
+# coding: iso-latin-1
+[out]
+MypyFile:1()
+
+[case testYieldExpressionInParens]
+def f():
+    (yield)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        YieldExpr:2()))))
diff --git a/test-data/unit/python2eval.test b/test-data/unit/python2eval.test
new file mode 100644
index 0000000..b750de8
--- /dev/null
+++ b/test-data/unit/python2eval.test
@@ -0,0 +1,474 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython (Python 2 mode).
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+
+
+[case testAbs2_python2]
+n = None  # type: int
+f = None  # type: float
+n = abs(1)
+abs(1) + 'x'  # Error
+f = abs(1.1)
+abs(1.1) + 'x'  # Error
+[out]
+_program.py:4: error: Unsupported operand types for + ("int" and "str")
+_program.py:6: error: Unsupported operand types for + ("float" and "str")
+
+[case testUnicode_python2]
+x = unicode('xyz', 'latin1')
+print x
+x = u'foo'
+print repr(x)
+[out]
+xyz
+u'foo'
+
+[case testXrangeAndRange_python2]
+for i in xrange(2):
+    print i
+for i in range(3):
+    print i
+[out]
+0
+1
+0
+1
+2
+
+[case testIterator_python2]
+import typing, sys
+x = iter('bar')
+print x.next(), x.next()
+[out]
+b a
+
+[case testEncodeAndDecode_python2]
+print 'a'.encode('latin1')
+print 'b'.decode('latin1')
+print u'c'.encode('latin1')
+print u'd'.decode('latin1')
+[out]
+a
+b
+c
+d
+
+[case testHasKey_python2]
+d = {1: 'x'}
+print d.has_key(1)
+print d.has_key(2)
+[out]
+True
+False
+
+[case testIntegerDivision_python2]
+x = 1 / 2
+x()
+[out]
+_program.py:2: error: "int" not callable
+
+[case testFloatDivision_python2]
+x = 1.0 / 2.0
+x = 1.0 / 2
+x = 1 / 2.0
+x = 1.5
+[out]
+
+[case testAnyStr_python2]
+from typing import AnyStr
+def f(x): # type: (AnyStr) -> AnyStr
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return u'zar'
+print f('')
+print f(u'')
+[out]
+foo
+zar
+
+[case testGenericPatterns_python2]
+from typing import Pattern
+import re
+p = None  # type: Pattern[unicode]
+p = re.compile(u'foo*')
+b = None  # type: Pattern[str]
+b = re.compile('foo*')
+print(p.match(u'fooo').group(0))
+[out]
+fooo
+
+[case testGenericMatch_python2]
+from typing import Match
+import re
+def f(m): # type: (Match[str]) -> None
+    print(m.group(0))
+f(re.match('x*', 'xxy'))
+[out]
+xx
+
+[case testVariableLengthTuple_python2]
+from typing import Tuple, cast
+x = cast(Tuple[int, ...], ())
+print(x)
+[out]
+()
+
+[case testFromFuturePrintFunction_python2]
+from __future__ import print_function
+print('a', 'b')
+[out]
+a b
+
+[case testFromFutureImportUnicodeLiterals_python2]
+from __future__ import unicode_literals
+print '>', ['a', b'b', u'c']
+[out]
+> [u'a', 'b', u'c']
+
+[case testUnicodeLiteralsKwargs_python2]
+from __future__ import unicode_literals
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {'a': 'b'}
+f(**params)
+[out]
+
+[case testUnicodeStringKwargs_python2]
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {u'a': 'b'}
+f(**params)
+[out]
+
+[case testStrKwargs_python2]
+def f(**kwargs):  # type: (...) -> None
+  pass
+params = {'a': 'b'}
+f(**params)
+[out]
+
+[case testFromFutureImportUnicodeLiterals2_python2]
+from __future__ import unicode_literals
+def f(x): # type: (str) -> None
+  pass
+f(b'')
+f(u'')
+f('')
+[out]
+_program.py:5: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
+_program.py:6: error: Argument 1 to "f" has incompatible type "unicode"; expected "str"
+
+[case testStrUnicodeCompatibility_python2]
+import typing
+def f(s): # type: (unicode) -> None
+    pass
+f(u'')
+f('')
+[out]
+
+[case testStrUnicodeCompatibilityInBuiltins_python2]
+import typing
+'x'.count('x')
+'x'.count(u'x')
+[out]
+
+[case testTupleAsSubtypeOfSequence_python2]
+from typing import TypeVar, Sequence
+T = TypeVar('T')
+def f(a): # type: (Sequence[T]) -> None
+    print a
+f(tuple())
+[out]
+()
+
+[case testReadOnlyProperty_python2]
+import typing
+class A:
+    @property
+    def foo(self): # type: () -> int
+        return 1
+print(A().foo + 2)
+[out]
+3
+
+[case testIOTypes_python2]
+from typing import IO, TextIO, BinaryIO, Any
+class X(IO[str]): pass
+class Y(TextIO): pass
+class Z(BinaryIO): pass
+[out]
+
+[case testOpenReturnType_python2]
+import typing
+f = open('/tmp/xyz', 'w')
+f.write(u'foo')
+f.write('bar')
+f.close()
+[out]
+_program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "unicode"; expected "str"
+
+[case testPrintFunctionWithFileArg_python2]
+from __future__ import print_function
+import typing
+if 1 == 2: # Don't want to run the code below, since it would create a file.
+    f = open('/tmp/xyz', 'w')
+    print('foo', file=f)
+    f.close()
+print('ok')
+[out]
+ok
+
+[case testStringIO_python2]
+import typing
+import io
+c = io.StringIO()
+c.write(u'\x89')
+print(repr(c.getvalue()))
+[out]
+u'\x89'
+
+[case testBytesIO_python2]
+import typing
+import io
+c = io.BytesIO()
+c.write('\x89')
+print(repr(c.getvalue()))
+[out]
+'\x89'
+
+[case testTextIOWrapper_python2]
+import typing
+import io
+b = io.BytesIO(u'\xab'.encode('utf8'))
+w = io.TextIOWrapper(b, encoding='utf8')
+print(repr(w.read()))
+[out]
+u'\xab'
+
+[case testIoOpen_python2]
+import typing
+import io
+if 1 == 2: # Only type check, do not execute
+    f = io.open('/tmp/xyz', 'w', encoding='utf8')
+    f.write(u'\xab')
+    f.close()
+print 'ok'
+[out]
+ok
+
+[case testUnionType_python2]
+from typing import Union
+y = None  # type: Union[int, str]
+def f(x): # type: (Union[int, str]) -> str
+    if isinstance(x, int):
+        x = str(x)
+    return x
+print f(12)
+print f('ab')
+[out]
+12
+ab
+
+[case testStrAdd_python2]
+import typing
+s = ''
+u = u''
+n = 0
+n = s + '' # E
+s = s + u'' # E
+[out]
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+
+[case testStrJoin_python2]
+import typing
+s = ''
+u = u''
+n = 0
+n = ''.join([''])   # Error
+s = ''.join([u''])  # Error
+[out]
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+_program.py:6: error: Incompatible types in assignment (expression has type "unicode", variable has type "str")
+
+[case testNamedTuple_python2]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+print x.a, x.b
+[out]
+1 s
+
+[case testNamedTupleError_python2]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+x.c
+[out]
+_program.py:5: error: "X" has no attribute "c"
+
+[case testAssignToComplexReal_python2]
+import typing
+x = 4j
+y = x.real
+y = x         # Error
+x.imag = 2.0  # Error
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
+_program.py:5: error: Property "imag" defined in "complex" is read-only
+
+[case testComplexArithmetic_python2]
+import typing
+print 5 + 8j
+print 3j * 2.0
+print 4j / 2.0
+[out]
+(5+8j)
+6j
+2j
+
+[case testNamedTupleWithTypes_python2]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int), ('b', str)])
+n = N(1, 'x')
+print n
+a, b = n
+print a, b
+print n[0]
+[out]
+N(a=1, b='x')
+1 x
+1
+
+[case testUnionTypeAlias_python2]
+from typing import Union
+U = Union[int, str]
+u = 1 # type: U
+u = 1.1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
+
+[case testSuperNew_python2]
+from typing import Dict, Any
+class MyType(type):
+    def __new__(cls, name, bases, namespace):
+        # type: (str, tuple, Dict[str, Any]) -> type
+        return super(MyType, cls).__new__(cls, name + 'x', bases, namespace)
+class A(object):
+    __metaclass__ = MyType
+print(type(A()).__name__)
+[out]
+Ax
+
+[case testSequenceIndexAndCount_python2]
+from typing import Sequence
+def f(x): # type: (Sequence[int]) -> None
+    print(x.index(1))
+    print(x.count(1))
+f([0, 0, 1, 1, 1])
+[out]
+2
+3
+
+[case testOptional_python2]
+from typing import Optional
+def f(): # type: () -> Optional[int]
+    pass
+x = f()
+y = 1
+y = x
+
+[case testUnicodeAndOverloading_python2]
+from m import f
+f(1)
+f('')
+f(u'')
+f(b'')
+[file m.pyi]
+from typing import overload
+ at overload
+def f(x): # type: (unicode) -> int
+  pass
+ at overload
+def f(x): # type: (bytearray) -> int
+  pass
+[out]
+_program.py:2: error: No overload variant of "f" matches argument types [builtins.int]
+
+[case testByteArrayStrCompatibility_python2]
+def f(x): # type: (str) -> None
+    pass
+f(bytearray('foo'))
+
+[case testAbstractProperty_python2]
+from abc import abstractproperty, ABCMeta
+class A:
+    __metaclass__ = ABCMeta
+    @abstractproperty
+    def x(self): # type: () -> int
+        pass
+class B(A):
+    @property
+    def x(self): # type: () -> int
+        return 3
+b = B()
+print b.x + 1
+[out]
+4
+
+[case testReModuleBytesPython2]
+# Regression tests for various overloads in the re module -- bytes version
+import re
+if False:
+    bre = b'a+'
+    bpat = re.compile(bre)
+    bpat = re.compile(bpat)
+    re.search(bre, b'').groups()
+    re.search(bre, u'')
+    re.search(bpat, b'').groups()
+    re.search(bpat, u'')
+    # match(), split(), findall(), finditer() are much the same, so skip those.
+    # sub(), subn() have more overloads and we are checking these:
+    re.sub(bre, b'', b'') + b''
+    re.sub(bpat, b'', b'') + b''
+    re.sub(bre, lambda m: b'', b'') + b''
+    re.sub(bpat, lambda m: b'', b'') + b''
+    re.subn(bre, b'', b'')[0] + b''
+    re.subn(bpat, b'', b'')[0] + b''
+    re.subn(bre, lambda m: b'', b'')[0] + b''
+    re.subn(bpat, lambda m: b'', b'')[0] + b''
+[out]
+
+[case testReModuleStringPython2]
+# Regression tests for various overloads in the re module -- string version
+import re
+ure = u'a+'
+upat = re.compile(ure)
+upat = re.compile(upat)
+re.search(ure, u'a').groups()
+re.search(ure, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
+re.search(upat, u'a').groups()
+re.search(upat, b'') # This ought to be an error, but isn't because of bytes->unicode equivalence
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(ure, u'', u'') + u''
+re.sub(upat, u'', u'') + u''
+re.sub(ure, lambda m: u'', u'') + u''
+re.sub(upat, lambda m: u'', u'') + u''
+re.subn(ure, u'', u'')[0] + u''
+re.subn(upat, u'', u'')[0] + u''
+re.subn(ure, lambda m: u'', u'')[0] + u''
+re.subn(upat, lambda m: u'', u'')[0] + u''
+[out]
+
+[case testYieldRegressionTypingAwaitable_python2]
+# Make sure we don't reference typing.Awaitable in Python 2 mode.
+def g():  # type: () -> int
+    yield
+[out]
+_program.py:2: error: The return type of a generator function should be "Generator" or one of its supertypes
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
new file mode 100644
index 0000000..6d16903
--- /dev/null
+++ b/test-data/unit/pythoneval-asyncio.test
@@ -0,0 +1,486 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython.
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+--
+-- This test file checks Asyncio and yield from interaction
+
+[case testImportAsyncio]
+import asyncio
+print('Imported')
+[out]
+Imported
+
+[case testSimpleCoroutineSleep]
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def greet_every_two_seconds() -> 'Generator[Any, None, None]':
+    n = 0
+    while n < 5:
+        print('Prev', n)
+        yield from asyncio.sleep(0.1)
+        print('After', n)
+        n += 1
+
+loop = asyncio.get_event_loop()
+try:
+    loop.run_until_complete(greet_every_two_seconds())
+finally:
+    loop.close()
+[out]
+Prev 0
+After 0
+Prev 1
+After 1
+Prev 2
+After 2
+Prev 3
+After 3
+Prev 4
+After 4
+
+[case testCoroutineCallingOtherCoroutine]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def compute(x: int, y: int) -> 'Generator[Any, None, int]':
+    print("Compute %s + %s ..." % (x, y))
+    yield from asyncio.sleep(0.1)
+    return x + y   # Here the int is wrapped in Future[int]
+
+ at asyncio.coroutine
+def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
+    result = yield from compute(x, y)  # The type of result will be int (is extracted from Future[int])
+    print("%s + %s = %s" % (x, y, result))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(print_sum(1, 2))
+loop.close()
+[out]
+Compute 1 + 2 ...
+1 + 2 = 3
+
+[case testCoroutineChangingFuture]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(0.1)
+    future.set_result('Future is done!')
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+Future is done!
+
+[case testFunctionAssignedAsCallback]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future, AbstractEventLoop
+
+ at asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('Callback works!')
+
+def got_result(future: 'Future[str]') -> None:
+    print(future.result())
+    loop.stop()
+
+loop = asyncio.get_event_loop() # type: AbstractEventLoop
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Here create a task with the function. (The Task need a Future[T] as first argument)
+future.add_done_callback(got_result)  # and assign the callback to the future
+try:
+    loop.run_forever()
+finally:
+    loop.close()
+[out]
+Callback works!
+
+[case testMultipleTasks]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Task, Future
+ at asyncio.coroutine
+def factorial(name, number) -> 'Generator[Any, None, None]':
+    f = 1
+    for i in range(2, number+1):
+        print("Task %s: Compute factorial(%s)..." % (name, i))
+        yield from asyncio.sleep(0.1)
+        f *= i
+    print("Task %s: factorial(%s) = %s" % (name, number, f))
+
+loop = asyncio.get_event_loop()
+tasks = [
+    asyncio.Task(factorial("A", 2)),
+    asyncio.Task(factorial("B", 3)),
+    asyncio.Task(factorial("C", 4))]
+loop.run_until_complete(asyncio.wait(tasks))
+loop.close()
+[out]
+Task A: Compute factorial(2)...
+Task B: Compute factorial(2)...
+Task C: Compute factorial(2)...
+Task A: factorial(2) = 2
+Task B: Compute factorial(3)...
+Task C: Compute factorial(3)...
+Task B: factorial(3) = 6
+Task C: Compute factorial(4)...
+Task C: factorial(4) = 24
+
+
+[case testConcatenatedCoroutines]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def h4() -> 'Generator[Any, None, int]':
+    x = yield from future
+    return x
+
+ at asyncio.coroutine
+def h3() -> 'Generator[Any, None, int]':
+    x = yield from h4()
+    print("h3: %s" % x)
+    return x
+
+ at asyncio.coroutine
+def h2() -> 'Generator[Any, None, int]':
+    x = yield from h3()
+    print("h2: %s" % x)
+    return x
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from h2()
+    print("h: %s" % x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[int]
+future.set_result(42)
+loop.run_until_complete(h())
+print("Outside %s" % future.result())
+loop.close()
+[out]
+h3: 42
+h2: 42
+h: 42
+Outside 42
+
+[case testConcatenatedCoroutinesReturningFutures]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(0.1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+ at asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[Future[int]]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    z = yield from y
+    print(z)
+    def normalize(future):
+        # The str conversion seems inconsistent; not sure exactly why. Normalize
+        # the result.
+        return str(future).replace('<Future finished ', 'Future<')
+    print(normalize(y))
+    print(normalize(x))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+Before
+42
+Future<result=42>
+Future<result=Future<result=42>>
+
+
+[case testCoroutineWithOwnClass]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+class A:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from future
+    print("h: %s" % x.x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[A]
+future.set_result(A(42))
+loop.run_until_complete(h())
+print("Outside %s" % future.result().x)
+loop.close()
+[out]
+h: 42
+Outside 42
+
+
+-- Errors
+
+[case testErrorAssigningCoroutineThatDontReturn]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def greet() -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(0.2)
+    print('Hello World')
+
+ at asyncio.coroutine
+def test() -> 'Generator[Any, None, None]':
+    yield from greet()
+    x = yield from greet()  # Error
+
+loop = asyncio.get_event_loop()
+try:
+    loop.run_until_complete(test())
+finally:
+    loop.close()
+[out]
+_program.py:13: error: Function does not return a value
+
+[case testErrorReturnIsNotTheSameType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def compute(x: int, y: int) -> 'Generator[Any, None, int]':
+    print("Compute %s + %s ..." % (x, y))
+    yield from asyncio.sleep(0.1)
+    return str(x + y)   # Error
+
+ at asyncio.coroutine
+def print_sum(x: int, y: int) -> 'Generator[Any, None, None]':
+    result = yield from compute(x, y)
+    print("%s + %s = %s" % (x, y, result))
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(print_sum(1, 2))
+loop.close()
+
+[out]
+_program.py:9: error: Incompatible return value type (got "str", expected "int")
+
+[case testErrorSetFutureDifferentInternalType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result(42)  # Error
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str"
+
+
+[case testErrorUsingDifferentFutureType]
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result(42)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Error
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+
+[case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType]
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+asyncio.coroutine
+def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('42')  #Try to set an str as result to a Future[int]
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))  # Error
+loop.run_until_complete(future)
+print(future.result())
+loop.close()
+[out]
+_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int"
+_program.py:12: error: Argument 1 to "slow_operation" has incompatible type Future[str]; expected Future[int]
+
+[case testErrorSettingCallbackWithDifferentFutureType]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future, AbstractEventLoop
+
+ at asyncio.coroutine
+def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]':
+    yield from asyncio.sleep(1)
+    future.set_result('Future is done!')
+
+def got_result(future: 'Future[int]') -> None:
+    print(future.result())
+    loop.stop()
+
+loop = asyncio.get_event_loop() # type: AbstractEventLoop
+future = asyncio.Future()  # type: Future[str]
+asyncio.Task(slow_operation(future))
+future.add_done_callback(got_result)  # Error
+
+try:
+    loop.run_forever()
+finally:
+    loop.close()
+[out]
+_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type Callable[[Future[int]], None]; expected Callable[[Future[str]], Any]
+
+[case testErrorOneMoreFutureInReturnType]
+import typing
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+ at asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    z = yield from y
+    print(z)
+    print(y)
+    print(x)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[Future[Future[int]]])
+
+[case testErrorOneLessFutureInReturnType]
+import typing
+from typing import Any, Generator
+import asyncio
+from asyncio import Future
+
+ at asyncio.coroutine
+def h4() -> 'Generator[Any, None, Future[int]]':
+    yield from asyncio.sleep(1)
+    f = asyncio.Future() #type: Future[int]
+    return f
+
+ at asyncio.coroutine
+def h3() -> 'Generator[Any, None, Future[int]]':
+    x = yield from h4()
+    x.set_result(42)
+    f = asyncio.Future() #type: Future[Future[int]]
+    f.set_result(x)
+    return f
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    print("Before")
+    x = yield from h3()
+    y = yield from x
+    print(y)
+    print(x)
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:18: error: Incompatible return value type (got Future[Future[int]], expected Future[int])
+
+[case testErrorAssignmentDifferentType]
+import typing
+from typing import Generator, Any
+import asyncio
+from asyncio import Future
+
+class A:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+class B:
+    def __init__(self, x: int) -> None:
+        self.x = x
+
+ at asyncio.coroutine
+def h() -> 'Generator[Any, None, None]':
+    x = yield from future # type: B # Error
+    print("h: %s" % x.x)
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()  # type: Future[A]
+future.set_result(A(42))
+loop.run_until_complete(h())
+loop.close()
+[out]
+_program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B")
diff --git a/test-data/unit/pythoneval-enum.test b/test-data/unit/pythoneval-enum.test
new file mode 100644
index 0000000..3ae2df5
--- /dev/null
+++ b/test-data/unit/pythoneval-enum.test
@@ -0,0 +1,134 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython.
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+--
+-- This test file checks Enum
+
+[case testEnumBasics]
+from enum import Enum
+class Medal(Enum):
+    gold = 1
+    silver = 2
+    bronze = 3
+m = Medal.gold
+m = 1
+[out]
+_program.py:7: error: Incompatible types in assignment (expression has type "int", variable has type "Medal")
+
+[case testEnumNameAndValue]
+from enum import Enum
+class Truth(Enum):
+    true = True
+    false = False
+x = ''
+x = Truth.true.name
+print(Truth.true.name)
+print(Truth.false.value)
+[out]
+true
+False
+
+[case testEnumUnique]
+import enum
+ at enum.unique
+class E(enum.Enum):
+    x = 1
+    y = 1  # NOTE: This duplicate value is not detected by mypy at the moment
+x = 1
+x = E.x
+[out]
+_program.py:7: error: Incompatible types in assignment (expression has type "E", variable has type "int")
+
+[case testIntEnum_assignToIntVariable]
+from enum import IntEnum
+class N(IntEnum):
+    x = 1
+    y = 1
+n = 1
+n = N.x  # Subclass of int, so it's okay
+s = ''
+s = N.y
+[out]
+_program.py:8: error: Incompatible types in assignment (expression has type "N", variable has type "str")
+
+[case testIntEnum_functionTakingIntEnum]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def takes_some_int_enum(n: SomeIntEnum):
+    pass
+takes_some_int_enum(SomeIntEnum.x)
+takes_some_int_enum(1)  # Error
+takes_some_int_enum(SomeIntEnum(1))  # How to deal with the above
+[out]
+_program.py:7: error: Argument 1 to "takes_some_int_enum" has incompatible type "int"; expected "SomeIntEnum"
+
+[case testIntEnum_functionTakingInt]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def takes_int(i: int):
+    pass
+takes_int(SomeIntEnum.x)
+takes_int(2)
+
+[case testIntEnum_functionReturningIntEnum]
+from enum import IntEnum
+class SomeIntEnum(IntEnum):
+    x = 1
+def returns_some_int_enum() -> SomeIntEnum:
+    return SomeIntEnum.x
+an_int = 1
+an_int = returns_some_int_enum()
+
+an_enum = SomeIntEnum.x
+an_enum = returns_some_int_enum()
+[out]
+
+[case testEnumMethods]
+from enum import Enum
+
+class Color(Enum):
+    red = 1
+    green = 2
+
+    def m(self, x: int): pass
+    @staticmethod
+    def m2(x: int): pass
+
+Color.red.m('')
+Color.m2('')
+[out]
+_program.py:11: error: Argument 1 to "m" of "Color" has incompatible type "str"; expected "int"
+_program.py:12: error: Argument 1 to "m2" of "Color" has incompatible type "str"; expected "int"
+
+[case testIntEnum_ExtendedIntEnum_functionTakingExtendedIntEnum]
+from enum import IntEnum
+class ExtendedIntEnum(IntEnum):
+    pass
+class SomeExtIntEnum(ExtendedIntEnum):
+    x = 1
+
+def takes_int(i: int):
+    pass
+takes_int(SomeExtIntEnum.x)
+
+def takes_some_ext_int_enum(s: SomeExtIntEnum):
+    pass
+takes_some_ext_int_enum(SomeExtIntEnum.x)
+
+
+[case testNamedTupleEnum]
+from typing import NamedTuple
+from enum import Enum
+
+N = NamedTuple('N', [('bar', int)])
+
+class E(N, Enum):
+    X = N(1)
+
+def f(x: E) -> None: pass
+
+f(E.X)
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
new file mode 100644
index 0000000..7945fe5
--- /dev/null
+++ b/test-data/unit/pythoneval.test
@@ -0,0 +1,1214 @@
+-- Test cases for type checking mypy programs using full stubs and running
+-- using CPython.
+--
+-- These are mostly regression tests -- no attempt is made to make these
+-- complete.
+
+
+[case testHello]
+import typing
+print('hello, world')
+[out]
+hello, world
+
+-- Skipped because different typing package versions have different repr()s. 
+[case testAbstractBaseClasses-skip]
+import re
+from typing import Sized, Sequence, Iterator, Iterable, Mapping, AbstractSet
+
+def check(o, t):
+    rep = re.sub('0x[0-9a-fA-F]+', '0x...', repr(o))
+    rep = rep.replace('sequenceiterator', 'str_iterator')
+    trep = str(t).replace('_abcoll.Sized', 'collections.abc.Sized')
+    print(rep, trep, isinstance(o, t))
+
+def f():
+    check('x', Sized)
+    check([1], Sequence)
+    check({1:3}, Sequence)
+    check(iter('x'), Iterator)
+    check('x', Iterable)
+    check({}, Mapping)
+    check(set([1]), AbstractSet)
+
+f()
+[out]
+'x' <class 'collections.abc.Sized'> True
+[1] typing.Sequence True
+{1: 3} typing.Sequence False
+<str_iterator object at 0x...> typing.Iterator True
+'x' typing.Iterable True
+{} typing.Mapping True
+{1} typing.AbstractSet True
+
+[case testSized]
+from typing import Sized
+class A(Sized):
+    def __len__(self): return 5
+print(len(A()))
+[out]
+5
+
+[case testReversed]
+from typing import Reversible
+class A(Reversible):
+    def __iter__(self): return iter('oof')
+    def __reversed__(self): return iter('foo')
+print(list(reversed(range(5))))
+print(list(reversed([1,2,3])))
+print(list(reversed('abc')))
+print(list(reversed(A())))
+[out]
+-- Duplicate [ at line beginning.
+[[4, 3, 2, 1, 0]
+[[3, 2, 1]
+[['c', 'b', 'a']
+[['f', 'o', 'o']
+
+[case testIntAndFloatConversion]
+from typing import SupportsInt, SupportsFloat
+class A(SupportsInt):
+    def __int__(self): return 5
+class B(SupportsFloat):
+    def __float__(self): return 1.2
+print(int(1))
+print(int(6.2))
+print(int('3'))
+print(int(b'4'))
+print(int(A()))
+print(float(-9))
+print(float(B()))
+[out]
+1
+6
+3
+4
+5
+-9.0
+1.2
+
+[case testAbs]
+from typing import SupportsAbs
+class A(SupportsAbs[float]):
+    def __abs__(self) -> float: return 5.5
+
+print(abs(-1))
+print(abs(-1.2))
+print(abs(A()))
+[out]
+1
+1.2
+5.5
+
+[case testAbs2]
+
+n = None  # type: int
+f = None  # type: float
+n = abs(1)
+abs(1) + 'x'  # Error
+f = abs(1.1)
+abs(1.1) + 'x'  # Error
+[out]
+_program.py:5: error: Unsupported operand types for + ("int" and "str")
+_program.py:7: error: Unsupported operand types for + ("float" and "str")
+
+[case testRound]
+from typing import SupportsRound
+class A(SupportsRound):
+    def __round__(self, ndigits=0): return 'x%d' % ndigits
+print(round(1.6))
+print(round(A()))
+print(round(A(), 2))
+[out]
+2
+x0
+x2
+
+[case testCallMethodViaTypeObject]
+import typing
+print(list.__add__([1, 2], [3, 4]))
+[out]
+[[1, 2, 3, 4]
+
+[case testClassDataAttribute]
+import typing
+class A:
+    x = 0
+print(A.x)
+A.x += 1
+print(A.x)
+[out]
+0
+1
+
+[case testInheritedClassAttribute]
+import typing
+class A:
+    x = 1
+    def f(self) -> None: print('f')
+class B(A):
+    pass
+B.f(None)
+print(B.x)
+[out]
+f
+1
+
+[case testFunctionDecorator]
+from typing import TypeVar, cast
+ftype = TypeVar('ftype')
+def logged(f: ftype) -> ftype:
+    def g(*args, **kwargs):
+        print('enter', f.__name__)
+        r = f(*args, **kwargs)
+        print('exit', f.__name__)
+        return r
+    return cast(ftype, g)
+
+ at logged
+def foo(s: str) -> str:
+    print('foo', s)
+    return s + '!'
+
+print(foo('y'))
+print(foo('x'))
+[out]
+enter foo
+foo y
+exit foo
+y!
+enter foo
+foo x
+exit foo
+x!
+
+[case testModuleAttributes]
+import math
+import typing
+print(math.__name__)
+print(type(math.__dict__))
+print(type(math.__doc__ or ''))
+print(math.__class__)
+[out]
+math
+<class 'dict'>
+<class 'str'>
+<class 'module'>
+
+[case testSpecialAttributes]
+import typing
+class A: pass
+print(object().__doc__)
+print(A().__class__)
+[out]
+The most base type
+<class '__main__.A'>
+
+[case testFunctionAttributes]
+import typing
+ord.__class__
+print(type(ord.__doc__ + ''))
+print(ord.__name__)
+print(ord.__module__)
+[out]
+<class 'str'>
+ord
+builtins
+
+[case testTypeAttributes]
+import typing
+print(str.__class__)
+print(type(str.__doc__))
+print(str.__name__)
+print(str.__module__)
+print(str.__dict__ is not None)
+[out]
+<class 'type'>
+<class 'str'>
+str
+builtins
+True
+
+[case testBoolCompatibilityWithInt]
+import typing
+x = 0
+x = True
+print(bool('x'))
+print(bool(''))
+[out]
+True
+False
+
+[case testCallBuiltinTypeObjectsWithoutArguments]
+import typing
+print(int())
+print(repr(str()))
+print(repr(bytes()))
+print(float())
+print(bool())
+[out]
+0
+''
+b''
+0.0
+False
+
+[case testIntegerDivision]
+import typing
+x = 1 / 2
+x = 1.5
+[out]
+
+[case testStaticmethod]
+import typing
+class A:
+    @staticmethod
+    def f(x: str) -> int: return int(x)
+print(A.f('12'))
+print(A().f('34'))
+[out]
+12
+34
+
+[case testClassmethod]
+import typing
+class A:
+    @classmethod
+    def f(cls, x: str) -> int: return int(x)
+print(A.f('12'))
+print(A().f('34'))
+[out]
+12
+34
+
+[case testIntMethods]
+import typing
+print(int.from_bytes(b'ab', 'big'))
+n = 0
+print(n.from_bytes(b'ac', 'big'))
+print(n.from_bytes([2, 3], 'big'))
+print(n.to_bytes(2, 'big'))
+[out]
+24930
+24931
+515
+b'\x00\x00'
+
+[case testFloatMethods]
+import typing
+print(1.5.as_integer_ratio())
+print(1.5.hex())
+print(2.0.is_integer())
+print(float.fromhex('0x1.8'))
+[out]
+(3, 2)
+0x1.8000000000000p+0
+True
+1.5
+
+[case testArray]
+import typing
+import array
+array.array('b', [1, 2])
+[out]
+
+[case testDictFromkeys]
+import typing
+d = dict.fromkeys('foo')
+d['x'] = 2
+d2 = dict.fromkeys([1, 2], b'')
+d2[2] = b'foo'
+[out]
+
+[case testReadOnlyProperty]
+class A:
+    x = 2
+    @property
+    def f(self) -> int:
+        return self.x + 1
+print(A().f)
+[out]
+3
+
+[case testIsinstanceWithTuple]
+from typing import cast, Any
+x = cast(Any, (1, 'x'))
+if isinstance(x, tuple):
+    print(x[0], x[1])
+[out]
+1 x
+
+[case testTypevarValues]
+from typing import TypeVar
+T = TypeVar('T', str, bytes)
+def f(x: T) -> T:
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return b'bar'
+print(f(''))
+print(f(b''))
+[out]
+foo
+b'bar'
+
+[case testAnyStr]
+from typing import AnyStr
+def f(x: AnyStr) -> AnyStr:
+    if isinstance(x, str):
+        return 'foo'
+    else:
+        return b'zar'
+print(f(''))
+print(f(b''))
+[out]
+foo
+b'zar'
+
+[case testNameNotImportedFromTyping]
+import typing
+cast(int, 2)
+[out]
+_program.py:2: error: Name 'cast' is not defined
+
+[case testBinaryIOType]
+from typing import BinaryIO
+def f(f: BinaryIO) -> None:
+    f.write(b'foo')
+    f.write(bytearray(b'foo'))
+[out]
+
+[case testIOTypes]
+from typing import IO
+import sys
+def txt(f: IO[str]) -> None:
+    f.write('foo')
+    f.write(b'foo')
+def bin(f: IO[bytes]) -> None:
+    f.write(b'foo')
+    f.write(bytearray(b'foo'))
+txt(sys.stdout)
+bin(sys.stdout)
+[out]
+_program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str"
+_program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected IO[bytes]
+
+[case testBuiltinOpen]
+f = open('x')
+f.write('x')
+f.write(b'x')
+f.foobar()
+[out]
+_program.py:4: error: IO[Any] has no attribute "foobar"
+
+[case testGenericPatterns]
+from typing import Pattern
+import re
+p = None  # type: Pattern[str]
+p = re.compile('foo*')
+b = None  # type: Pattern[bytes]
+b = re.compile(b'foo*')
+print(p.match('fooo').group(0))
+[out]
+fooo
+
+[case testGenericMatch]
+from typing import Match
+import re
+def f(m: Match[bytes]) -> None:
+    print(m.group(0))
+f(re.match(b'x*', b'xxy'))
+[out]
+b'xx'
+
+[case testMultipleTypevarsWithValues]
+from typing import TypeVar
+
+T = TypeVar('T', int, str)
+S = TypeVar('S', int, str)
+
+def f(t: T, s: S) -> None:
+    t + s
+[out]
+_program.py:7: error: Unsupported operand types for + ("int" and "str")
+_program.py:7: error: Unsupported operand types for + ("str" and "int")
+
+[case testSystemExitCode]
+import typing
+print(SystemExit(5).code)
+[out]
+5
+
+[case testIntFloatDucktyping]
+
+x = None  # type: float
+x = 2.2
+x = 2
+def f(x: float) -> None: pass
+f(1.1)
+f(1)
+[out]
+
+[case testFloatOperations]
+import typing
+print(1.5 + 1.5)
+print(1.5 + 1)
+[out]
+3.0
+2.5
+
+[case testMathFunctionWithIntArgument]
+import typing
+import math
+math.sin(2)
+math.sin(2.2)
+
+[case testAbsReturnType]
+
+f = None  # type: float
+n = None  # type: int
+n = abs(2)
+f = abs(2.2)
+abs(2.2) + 'x'
+[out]
+_program.py:6: error: Unsupported operand types for + ("float" and "str")
+
+[case testROperatorMethods]
+
+b = None  # type: bytes
+s = None  # type: str
+s = b'foo' * 5 # Error
+b = 5 * b'foo'
+b = b'foo' * 5
+s = 5 * 'foo'
+s = 'foo' * 5
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "bytes", variable has type "str")
+
+[case testROperatorMethods2]
+import typing
+print(2 / 0.5)
+print(' ', 2 * [3, 4])
+[out]
+4.0
+  [3, 4, 3, 4]
+
+[case testNotImplemented]
+import typing
+class A:
+    def __add__(self, x: int) -> int:
+        if isinstance(x, int):
+            return x + 1
+        return NotImplemented
+class B:
+    def __radd__(self, x: A) -> str:
+        return 'x'
+print(A() + 1)
+print(A() + B())
+[out]
+2
+x
+
+[case testMappingMethods]
+# Regression test
+from typing import Mapping
+x = {'x': 'y'} # type: Mapping[str, str]
+print('x' in x)
+print('y' in x)
+[out]
+True
+False
+
+[case testOverlappingOperatorMethods]
+
+class X: pass
+class A:
+    def __add__(self, x) -> int:
+        if isinstance(x, X):
+            return 1
+        return NotImplemented
+class B:
+    def __radd__(self, x: A) -> str: return 'x'
+class C(X, B): pass
+b = None  # type: B
+b = C()
+print(A() + b)
+[out]
+_program.py:9: error: Signatures of "__radd__" of "B" and "__add__" of "A" are unsafely overlapping
+
+[case testBytesAndBytearrayComparisons]
+import typing
+print(b'ab' < bytearray(b'b'))
+print(bytearray(b'ab') < b'a')
+[out]
+True
+False
+
+[case testBytesAndBytearrayComparisons2]
+import typing
+'' < b''
+b'' < ''
+'' < bytearray()
+bytearray() < ''
+[out]
+_program.py:2: error: Unsupported operand types for > ("bytes" and "str")
+_program.py:3: error: Unsupported operand types for > ("str" and "bytes")
+_program.py:4: error: Unsupported operand types for > ("bytearray" and "str")
+_program.py:5: error: Unsupported operand types for > ("str" and "bytearray")
+
+[case testInplaceOperatorMethod]
+import typing
+a = [1]
+print('', a.__iadd__([2]))
+print('', a)
+[out]
+ [1, 2]
+ [1, 2]
+
+[case testListInplaceAdd]
+import typing
+a = [1]
+a += iter([2, 3])
+print(tuple(a))
+[out]
+(1, 2, 3)
+
+[case testListConcatenateWithIterable]
+import typing
+[1] + iter([2, 3])
+[out]
+_program.py:2: error: Unsupported operand types for + ("list" and Iterator[int])
+
+[case testInferHeterogeneousListOfIterables]
+from typing import Sequence
+s = ['x', 'y'] # type: Sequence[str]
+a = [['x', 'x'], 'fo', s, iter('foo'), {'aa'}]
+for i, x in enumerate(a):
+    print(i, next(iter(x)))
+[out]
+0 x
+1 f
+2 x
+3 f
+4 aa
+
+[case testTextIOProperties]
+import typing
+import sys
+print(type(sys.stdin.encoding))
+print(type(sys.stdin.errors))
+sys.stdin.line_buffering
+sys.stdin.buffer
+sys.stdin.newlines
+[out]
+<class 'str'>
+<class 'str'>
+
+[case testIOProperties]
+import typing
+import sys
+print(sys.stdin.name)
+print(sys.stdin.buffer.mode)
+[out]
+<stdin>
+rb
+
+[case testSetUnion]
+import typing
+s = {'x', 'y'}
+print('>', sorted(s.union('foo')))
+[out]
+> ['f', 'o', 'x', 'y']
+
+[case testFromFuturePrintFunction]
+from __future__ import print_function
+print('a', 'b')
+[out]
+a b
+
+[case testLenOfTuple]
+import typing
+print(len((1, 'x')))
+[out]
+2
+
+[case testListMethods]
+import typing
+import sys
+l = [0, 1, 2, 3, 4]
+if sys.version >= '3.3':
+    l.clear()
+else:
+    l = []
+l.append(0)
+print('>', l)
+if sys.version >= '3.3':
+    m = l.copy()
+else:
+    m = l[:]
+m.extend([1, 2, 3, 4])
+print('>', m)
+print(l.index(0))
+print(l.index(0, 0))
+print(l.index(0, 0, 1))
+try:
+    print(l.index(1))
+    print('expected ValueError')
+except ValueError:
+    pass
+l.insert(0, 1)
+print('>', l)
+print(l.pop(0))
+print(l.pop())
+m.remove(0)
+try:
+    m.remove(0)
+    print('expected ValueError')
+except ValueError:
+    pass
+m.reverse()
+m.sort()
+m.sort(key=lambda x: -x)
+m.sort(reverse=False)
+m.sort(key=lambda x: -x, reverse=True)
+print('>', m)
+[out]
+> [0]
+> [0, 1, 2, 3, 4]
+0
+0
+0
+> [1, 0]
+1
+0
+> [1, 2, 3, 4]
+
+[case testListOperators]
+import typing
+l = [0, 1]
+print('+', l + [2])
+print('*', l * 2)
+print('*', 2 * l)
+print('in', 1 in l)
+print('==', l == [1, 2])
+print('!=', l != [1, 2])
+print('>', l > [1, 2, 3])
+print('>=', l >= [1, 2, 3])
+print('<', l < [1, 2, 3])
+print('<=', l <= [1, 2, 3])
+print('>[0]', l[0])
+l += [2]
+print('+=', l)
+l *= 2
+print('*=', l)
+print('iter', list(iter(l)))
+print('len', len(l))
+print('repr', repr(l))
+l[:3] = []
+print('setslice', l)
+print('reversed', list(reversed(l)))
+[out]
++ [0, 1, 2]
+* [0, 1, 0, 1]
+* [0, 1, 0, 1]
+in True
+== False
+!= True
+> False
+>= False
+< True
+<= True
+>[0] 0
++= [0, 1, 2]
+*= [0, 1, 2, 0, 1, 2]
+iter [0, 1, 2, 0, 1, 2]
+len 6
+repr [0, 1, 2, 0, 1, 2]
+setslice [0, 1, 2]
+reversed [2, 1, 0]
+
+[case testTupleAsSubtypeOfSequence]
+from typing import TypeVar, Sequence
+T = TypeVar('T')
+def f(a: Sequence[T]) -> None: print(a)
+f(tuple())
+[out]
+()
+
+[case testMapWithLambdaSpecialCase-skip]
+# TODO: Fix this; this was broken at some point but not sure why.
+from typing import List, Iterator
+a = [[1], [3]]
+b = map(lambda y: y[0], a)
+print('>', list(b))
+[out]
+> [1, 3]
+
+[case testInternalBuiltinDefinition]
+import typing
+def f(x: _T) -> None: pass
+[out]
+_program.py:2: error: Name '_T' is not defined
+
+[case testVarArgsFunctionSubtyping]
+import typing
+def f(*args: str) -> str: return args[0]
+map(f, ['x'])
+map(f, [1])
+[out]
+_program.py:4: error: Argument 1 to "map" has incompatible type Callable[[StarArg(str)], str]; expected Callable[[int], str]
+
+[case testMapStr]
+import typing
+x = range(3)
+a = list(map(str, x))
+a + 1
+[out]
+_program.py:4: error: Unsupported operand types for + (List[str] and "int")
+
+[case testNamedTuple]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+print(x.a, x.b)
+[out]
+1 s
+
+[case testNamedTupleShortSyntax]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ' a  b ')
+x = X(a=1, b='s')
+print(x.a, x.b)
+[out]
+1 s
+
+[case testNamedTupleError]
+import typing
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+x = X(a=1, b='s')
+x.c
+[out]
+_program.py:5: error: "X" has no attribute "c"
+
+[case testNamedTupleTupleOperations]
+from typing import Iterable
+from collections import namedtuple
+X = namedtuple('X', ['a', 'b'])
+def f(x: Iterable[int]) -> None: pass
+x = X(a=1, b='s')
+f(x)
+print(len(x))
+print(x.index(1))
+print(x.count(1))
+print(x + x)
+[out]
+2
+0
+1
+(1, 's', 1, 's')
+
+[case testNamedTupleWithTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int), ('b', str)])
+n = N(1, 'x')
+print(n)
+a, b = n
+print(a, b)
+print(n[0])
+[out]
+N(a=1, b='x')
+1 x
+1
+
+[case testRelativeImport]
+import typing
+from m import x
+print(x)
+[file m/__init__.py]
+from .n import x
+[file m/n.py]
+x = 1
+[out]
+1
+
+[case testRelativeImport2]
+import typing
+from m.n import x
+print(x)
+[file m/__init__.py]
+[file m/n.py]
+from .nn import x
+[file m/nn.py]
+x = 2
+[out]
+2
+
+[case testPyiTakesPrecedenceOverPy]
+import m
+m.f(1)
+[file m.py]
+def f(x):
+    print(x)
+[file m.pyi]
+import typing
+def f(x: str) -> None: pass
+[out]
+_program.py:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+
+[case testAssignToComplexReal]
+import typing
+x = 4j
+y = x.real
+y = x         # Error
+x.real = 2.0  # Error
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "complex", variable has type "float")
+_program.py:5: error: Property "real" defined in "complex" is read-only
+
+[case testComplexArithmetic]
+import typing
+print(5 + 8j)
+print(3j * 2.0)
+print(4J / 2.0)
+[out]
+(5+8j)
+6j
+2j
+
+[case testComplexArithmetic2]
+import typing
+x = 5 + 8j
+x = ''
+y = 3j * 2.0
+y = ''
+[out]
+_program.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
+_program.py:5: error: Incompatible types in assignment (expression has type "str", variable has type "complex")
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+u = 1 # type: U
+u = 1.1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "float", variable has type "Union[int, str]")
+
+[case testTupleTypeAlias]
+from typing import Tuple
+A = Tuple[int, str]
+u = 1, 'x' # type: A
+u = 1
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "int", variable has type "Tuple[int, str]")
+
+[case testCallableTypeAlias]
+from typing import Callable
+A = Callable[[int], None]
+def f(x: A) -> None:
+    x(1)
+    x('')
+[out]
+_program.py:5: error: Argument 1 has incompatible type "str"; expected "int"
+
+[case testSuperNew]
+from typing import Dict, Any
+class MyType(type):
+    def __new__(cls, name: str, bases: tuple, namespace: Dict[str, Any]) -> type:
+        return super().__new__(cls, name + 'x', bases, namespace)
+class A(metaclass=MyType): pass
+print(type(A()).__name__)
+[out]
+Ax
+
+[case testSequenceIndexAndCount]
+from typing import Sequence
+def f(x: Sequence[int]) -> None:
+    print(x.index(1))
+    print(x.count(1))
+f([0, 0, 1, 1, 1])
+[out]
+2
+3
+
+[case testEscapeInTripleQuotedStrLiteral]
+print('''\'''')
+print(r"""\"""$""")
+[out]
+'
+\"""$
+
+[case testSubclassBothGenericAndNonGenericABC]
+from typing import Generic, TypeVar
+from abc import ABCMeta
+T = TypeVar('T')
+class A(metaclass=ABCMeta): pass
+class B(Generic[T]): pass
+class C(A, B): pass
+class D(B, A): pass
+class E(A, B[T], Generic[T]): pass
+class F(B[T], A, Generic[T]): pass
+def f(e: E[int], f: F[int]) -> None: pass
+[out]
+
+[case testOptional]
+from typing import Optional
+def f() -> Optional[int]: pass
+x = f()
+y = 1
+y = x
+
+[case testAppendToStarArg]
+import typing
+def f(*x: int) -> None:
+    x.append(1)
+f(1)
+[out]
+_program.py:3: error: Tuple[int, ...] has no attribute "append"
+
+[case testExit]
+print('a')
+exit(2)
+print('b')
+[out]
+a
+
+[case testTypeVariableTypeComparability]
+from typing import TypeVar
+T = TypeVar('T')
+def eq(x: T, y: T, z: T) -> T:
+    if x == y:
+        return y
+    else:
+        return z
+print(eq(1, 2, 3))
+print(eq('x', 'x', 'z'))
+[out]
+3
+x
+
+[case testIntDecimalCompatibility]
+import typing
+from decimal import Decimal
+print(Decimal(1) + 2)
+print(Decimal(1) - 2)
+print(1 + Decimal('2.34'))
+print(1 - Decimal('2.34'))
+print(2 * Decimal('2.34'))
+[out]
+3
+-1
+3.34
+-1.34
+4.68
+
+[case testInstantiateBuiltinTypes]
+from typing import Dict, Set, List
+d = dict()  # type: Dict[int, str]
+s = set()   # type: Set[int]
+l = list()  # type: List[int]
+str()
+bytes()
+bytearray()
+int()
+float()
+complex()
+slice(1)
+bool()
+
+[case testVariableLengthTuple]
+from typing import Tuple
+def p(t: Tuple[int, ...]) -> None:
+    for n in t:
+        print(n)
+p((1, 3, 2))
+[out]
+1
+3
+2
+
+[case testVariableLengthTupleError]
+from typing import Tuple
+def p(t: Tuple[str, ...]) -> None:
+    n = 5
+    print(t[n])
+    for s in t:
+        s()
+''.startswith(('x', 'y'))
+''.startswith(('x', b'y'))
+[out]
+_program.py:6: error: "str" not callable
+_program.py:8: error: Argument 1 to "startswith" of "str" has incompatible type "Tuple[str, bytes]"; expected "Union[str, Tuple[str, ...]]"
+
+[case testMultiplyTupleByInteger]
+n = 4
+t = ('',) * n
+t + 1
+[out]
+_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+
+[case testMultiplyTupleByIntegerReverse]
+n = 4
+t = n * ('',)
+t + 1
+[out]
+_program.py:3: error: Unsupported operand types for + (Tuple[str, ...] and "int")
+
+[case testDictWithKeywordArgs]
+from typing import Dict, Any, List
+d1 = dict(a=1, b=2) # type: Dict[str, int]
+d2 = dict(a=1, b='') # type: Dict[str, int] # E
+d3 = dict(a=1, b=1)
+d3.xyz # E
+d4 = dict(a=1, b='') # type: Dict[str, Any]
+result = dict(x=[], y=[]) # type: Dict[str, List[str]]
+[out]
+_program.py:3: error: List item 1 has incompatible type "Tuple[str, str]"
+_program.py:5: error: Dict[str, int] has no attribute "xyz"
+
+[case testDefaultDict]
+import typing as t
+from collections import defaultdict
+
+T = t.TypeVar('T')
+
+d1 = defaultdict(list) # type: t.DefaultDict[int, str]
+d2 = defaultdict() # type: t.DefaultDict[int, str]
+d2[0] = '0'
+d2['0'] = 0
+
+def tst(dct: t.DefaultDict[int, T]) -> T:
+    return dct[0]
+
+collections = ['coins', 'stamps', 'comics'] # type: t.List[str]
+d3 = defaultdict(str) # type: t.DefaultDict[int, str]
+collections[2]
+
+tst(defaultdict(list, {0: []}))
+tst(defaultdict(list, {'0': []}))
+
+class MyDDict(t.DefaultDict[int,T], t.Generic[T]):
+    pass
+MyDDict(dict)['0']
+MyDDict(dict)[0]
+[out]
+_program.py:6: error: Argument 1 to "defaultdict" has incompatible type List[_T]; expected Callable[[], str]
+_program.py:9: error: Invalid index type "str" for "dict"; expected type "int"
+_program.py:9: error: Incompatible types in assignment (expression has type "int", target has type "str")
+_program.py:19: error: List item 0 has incompatible type "Tuple[str, List[None]]"
+_program.py:23: error: Invalid index type "str" for "dict"; expected type "int"
+
+[case testDictUpdateInference]
+from typing import Dict, Optional
+d = {}  # type: Dict[str, Optional[int]]
+d.update({str(i): None for i in range(4)})
+
+[case testSuperAndSetattr]
+class A:
+    def __init__(self) -> None:
+        super().__setattr__('a', 1)
+        super().__setattr__(1, 'a')
+[out]
+_program.py:4: error: Argument 1 to "__setattr__" of "object" has incompatible type "int"; expected "str"
+
+[case testMetaclassAndSuper]
+class A(type):
+    def __new__(cls, name, bases, namespace) -> 'type':
+        return super().__new__(cls, '', (object,), {'x': 7})
+
+class B(metaclass=A):
+    pass
+
+print(getattr(B(), 'x'))
+[out]
+7
+
+[case testSortedNoError]
+from typing import Iterable, Callable, TypeVar, List, Dict
+T = TypeVar('T')
+def sorted(x: Iterable[T], *, key: Callable[[T], object] = None) -> None: ...
+a = None # type: List[Dict[str, str]]
+sorted(a, key=lambda y: y[''])
+
+[case testAbstractProperty]
+from abc import abstractproperty, ABCMeta
+class A(metaclass=ABCMeta):
+    @abstractproperty
+    def x(self) -> int: pass
+class B(A):
+    @property
+    def x(self) -> int:
+        return 3
+b = B()
+print(b.x + 1)
+[out]
+4
+
+[case testInferenceWithLambda]
+from typing import TypeVar, Iterable, Iterator
+import itertools
+
+_T = TypeVar('_T')
+
+def f(iterable): # type: (Iterable[_T]) -> Iterator[List[_T]]
+    grouped = itertools.groupby(enumerate(iterable), lambda pair: pair[0] // 2)
+    return ([elem for _, elem in group] for _, group in grouped)
+
+[case testReModuleBytes]
+# Regression tests for various overloads in the re module -- bytes version
+import re
+bre = b'a+'
+bpat = re.compile(bre)
+bpat = re.compile(bpat)
+re.search(bre, b'').groups()
+re.search(bre, u'') # Error
+re.search(bpat, b'').groups()
+re.search(bpat, u'') # Error
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(bre, b'', b'') + b''
+re.sub(bpat, b'', b'') + b''
+re.sub(bre, lambda m: b'', b'') + b''
+re.sub(bpat, lambda m: b'', b'') + b''
+re.subn(bre, b'', b'')[0] + b''
+re.subn(bpat, b'', b'')[0] + b''
+re.subn(bre, lambda m: b'', b'')[0] + b''
+re.subn(bpat, lambda m: b'', b'')[0] + b''
+[out]
+_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:9: error: Cannot infer type argument 1 of "search"
+
+[case testReModuleString]
+# Regression tests for various overloads in the re module -- string version
+import re
+sre = 'a+'
+spat = re.compile(sre)
+spat = re.compile(spat)
+re.search(sre, '').groups()
+re.search(sre, b'') # Error
+re.search(spat, '').groups()
+re.search(spat, b'') # Error
+# match(), split(), findall(), finditer() are much the same, so skip those.
+# sub(), subn() have more overloads and we are checking these:
+re.sub(sre, '', '') + ''
+re.sub(spat, '', '') + ''
+re.sub(sre, lambda m: '', '') + ''
+re.sub(spat, lambda m: '', '') + ''
+re.subn(sre, '', '')[0] + ''
+re.subn(spat, '', '')[0] + ''
+re.subn(sre, lambda m: '', '')[0] + ''
+re.subn(spat, lambda m: '', '')[0] + ''
+[out]
+_program.py:7: error: Type argument 1 of "search" has incompatible value "object"
+_program.py:9: error: Cannot infer type argument 1 of "search"
+
+[case testListSetitemTuple]
+from typing import List, Tuple
+a = []  # type: List[Tuple[str, int]]
+a[0] = 'x', 1
+a[1] = 2, 'y'
+a[:] = [('z', 3)]
+[out]
+_program.py:4: error: Incompatible types in assignment (expression has type "Tuple[int, str]", target has type "Tuple[str, int]")
diff --git a/test-data/unit/semanal-abstractclasses.test b/test-data/unit/semanal-abstractclasses.test
new file mode 100644
index 0000000..b5147bd
--- /dev/null
+++ b/test-data/unit/semanal-abstractclasses.test
@@ -0,0 +1,119 @@
+[case testAbstractMethods]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta):
+  @abstractmethod
+  def g(self) -> 'A': pass
+  @abstractmethod
+  def f(self) -> 'A': return self
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod, ABCMeta])
+  Import:2(typing)
+  ClassDef:4(
+    A
+    Metaclass(ABCMeta)
+    Decorator:5(
+      Var(g)
+      FuncDef:6(
+        g
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:6(
+          PassStmt:6())))
+    Decorator:7(
+      Var(f)
+      FuncDef:8(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:8(
+          ReturnStmt:8(
+            NameExpr(self [l])))))))
+
+[case testClassInheritingTwoAbstractClasses]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class A(metaclass=ABCMeta): pass
+class B(metaclass=ABCMeta): pass
+class C(A, B): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod, ABCMeta])
+  Import:2(typing)
+  ClassDef:4(
+    A
+    Metaclass(ABCMeta)
+    PassStmt:4())
+  ClassDef:5(
+    B
+    Metaclass(ABCMeta)
+    PassStmt:5())
+  ClassDef:6(
+    C
+    BaseType(
+      __main__.A
+      __main__.B)
+    PassStmt:6()))
+
+[case testAbstractGenericClass]
+from abc import abstractmethod
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+  @abstractmethod
+  def f(self) -> 'A[T]': pass
+[out]
+MypyFile:1(
+  ImportFrom:1(abc, [abstractmethod])
+  ImportFrom:2(typing, [Generic, TypeVar])
+  AssignmentStmt:3(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      T)
+    Decorator:5(
+      Var(f)
+      FuncDef:6(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A[T`1]) -> __main__.A[T`1]
+        Abstract
+        Block:6(
+          PassStmt:6())))))
+
+[case testFullyQualifiedAbstractMethodDecl]
+import abc
+from abc import ABCMeta
+import typing
+
+class A(metaclass=ABCMeta):
+  @abc.abstractmethod
+  def g(self) -> 'A': pass
+[out]
+MypyFile:1(
+  Import:1(abc)
+  ImportFrom:2(abc, [ABCMeta])
+  Import:3(typing)
+  ClassDef:5(
+    A
+    Metaclass(ABCMeta)
+    Decorator:6(
+      Var(g)
+      FuncDef:7(
+        g
+        Args(
+          Var(self))
+        def (self: __main__.A) -> __main__.A
+        Abstract
+        Block:7(
+          PassStmt:7())))))
diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test
new file mode 100644
index 0000000..3c11da8
--- /dev/null
+++ b/test-data/unit/semanal-basic.test
@@ -0,0 +1,459 @@
+[case testEmptyFile]
+[out]
+MypyFile:1()
+
+[case testGlobalVariable]
+x = 1
+x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    NameExpr(x [__main__.x])))
+
+[case testMultipleGlobals]
+x = y = 2
+z = 3
+(x, y, z)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(2))
+  AssignmentStmt:2(
+    NameExpr(z* [__main__.z])
+    IntExpr(3))
+  ExpressionStmt:3(
+    TupleExpr:3(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])
+      NameExpr(z [__main__.z]))))
+
+[case testEmptyFunction]
+def f(): pass
+f()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      PassStmt:1()))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f [__main__.f])
+      Args())))
+
+[case testAccessingGlobalNameBeforeDefinition]
+x
+f()
+x = 1
+def f(): pass
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(x [__main__.x]))
+  ExpressionStmt:2(
+    CallExpr:2(
+      NameExpr(f [__main__.f])
+      Args()))
+  AssignmentStmt:3(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:4(
+    f
+    Block:4(
+      PassStmt:4())))
+
+[case testFunctionArgs]
+def f(x, y):
+  (x, y)
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testLocalVar]
+def f():
+  x = 1
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        IntExpr(1))
+      ExpressionStmt:3(
+        NameExpr(x [l])))))
+
+[case testAccessGlobalInFn]
+def f():
+  x
+  g()
+x = 1
+def g(): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x]))
+      ExpressionStmt:3(
+        CallExpr:3(
+          NameExpr(g [__main__.g])
+          Args()))))
+  AssignmentStmt:4(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:5(
+    g
+    Block:5(
+      PassStmt:5())))
+
+[case testAssignmentAfterInit]
+x = 1
+x = 2
+def f(y):
+  y = 1
+  z = 1
+  z = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(2))
+  FuncDef:3(
+    f
+    Args(
+      Var(y))
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(y [l])
+        IntExpr(1))
+      AssignmentStmt:5(
+        NameExpr(z* [l])
+        IntExpr(1))
+      AssignmentStmt:6(
+        NameExpr(z [l])
+        IntExpr(2)))))
+
+[case testLocalAndGlobalAliasing]
+x = 1
+def f():
+  x = 2
+  x
+x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x* [l])
+        IntExpr(2))
+      ExpressionStmt:4(
+        NameExpr(x [l]))))
+  ExpressionStmt:5(
+    NameExpr(x [__main__.x])))
+
+[case testArgumentInitializers]
+def f(x = f, y = object):
+  x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Init(
+      AssignmentStmt:1(
+        NameExpr(x [l])
+        NameExpr(f [__main__.f]))
+      AssignmentStmt:1(
+        NameExpr(y [l])
+        NameExpr(object [builtins.object])))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testVarArgs]
+def f(x, *y):
+  x, y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    VarArg(
+      Var(y))
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testGlobalDecl]
+x = None
+def f():
+    global x
+    x = None
+    x
+class A: pass
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x)
+      AssignmentStmt:4(
+        NameExpr(x [__main__.x])
+        NameExpr(None [builtins.None]))
+      ExpressionStmt:5(
+        NameExpr(x [__main__.x]))))
+  ClassDef:6(
+    A
+    PassStmt:6()))
+
+[case testMultipleNamesInGlobalDecl]
+x, y = None, None
+def f():
+    global x, y
+    x = y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    TupleExpr:1(
+      NameExpr(None [builtins.None])
+      NameExpr(None [builtins.None])))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x
+        y)
+      AssignmentStmt:4(
+        NameExpr(x [__main__.x])
+        NameExpr(y [__main__.y])))))
+
+[case testGlobalDeclScope]
+x = None
+def f():
+    global x
+def g():
+    x = None
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x)))
+  FuncDef:4(
+    g
+    Block:4(
+      AssignmentStmt:5(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None])))))
+
+[case testGlobalDeclScope2]
+x = None
+def f():
+    global x
+def g():
+    x = None
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  FuncDef:2(
+    f
+    Block:2(
+      GlobalDecl:3(
+        x)))
+  FuncDef:4(
+    g
+    Block:4(
+      AssignmentStmt:5(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None])))))
+
+[case testGlobalWithinMethod]
+x = None
+class A:
+  def f(self):
+    global x
+    x = self
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None]))
+  ClassDef:2(
+    A
+    FuncDef:3(
+      f
+      Args(
+        Var(self))
+      Block:3(
+        GlobalDecl:4(
+          x)
+        AssignmentStmt:5(
+          NameExpr(x [__main__.x])
+          NameExpr(self [l]))))))
+
+[case testGlobalDefinedInBlock]
+if object:
+    x = object()
+    x = x
+x
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      NameExpr(object [builtins.object]))
+    Then(
+      AssignmentStmt:2(
+        NameExpr(x* [__main__.x])
+        CallExpr:2(
+          NameExpr(object [builtins.object])
+          Args()))
+      AssignmentStmt:3(
+        NameExpr(x [__main__.x])
+        NameExpr(x [__main__.x]))))
+  ExpressionStmt:4(
+    NameExpr(x [__main__.x])))
+
+[case testNonlocalDecl]
+def g():
+    x = None
+    def f():
+        nonlocal x
+        x = None
+        x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None]))
+      FuncDef:3(
+        f
+        Block:3(
+          NonlocalDecl:4(
+            x)
+          AssignmentStmt:5(
+            NameExpr(x [l])
+            NameExpr(None [builtins.None]))
+          ExpressionStmt:6(
+            NameExpr(x [l])))))))
+
+[case testMultipleNamesInNonlocalDecl]
+def g():
+    x, y = None, None
+    def f(z):
+        nonlocal x, y
+        x = y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    Block:1(
+      AssignmentStmt:2(
+        TupleExpr:2(
+          NameExpr(x* [l])
+          NameExpr(y* [l]))
+        TupleExpr:2(
+          NameExpr(None [builtins.None])
+          NameExpr(None [builtins.None])))
+      FuncDef:3(
+        f
+        Args(
+          Var(z))
+        Block:3(
+          NonlocalDecl:4(
+            x
+            y)
+          AssignmentStmt:5(
+            NameExpr(x [l])
+            NameExpr(y [l])))))))
+
+[case testNestedFunctions]
+def f(x):
+    def g(y):
+        z = y + x
+    return g
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      FuncDef:2(
+        g
+        Args(
+          Var(y))
+        Block:2(
+          AssignmentStmt:3(
+            NameExpr(z* [l])
+            OpExpr:3(
+              +
+              NameExpr(y [l])
+              NameExpr(x [l])))))
+      ReturnStmt:4(
+        NameExpr(g [l])))))
+
+[case testNestedFunctionWithOverlappingName]
+def f(x):
+    def g():
+        x = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      FuncDef:2(
+        g
+        Block:2(
+          AssignmentStmt:3(
+            NameExpr(x* [l])
+            IntExpr(1)))))))
diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test
new file mode 100644
index 0000000..a99f851
--- /dev/null
+++ b/test-data/unit/semanal-classes.test
@@ -0,0 +1,623 @@
+-- Test cases related to classes for the semantic analyzer.
+
+[case testSimpleClass]
+class A: pass
+x = A
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  AssignmentStmt:2(
+    NameExpr(x* [__main__.x])
+    NameExpr(A [__main__.A])))
+
+[case testMethods]
+class A:
+  def __init__(self, x):
+    y = x
+  def f(self):
+    y = self
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self)
+        Var(x))
+      Block:2(
+        AssignmentStmt:3(
+          NameExpr(y* [l])
+          NameExpr(x [l]))))
+    FuncDef:4(
+      f
+      Args(
+        Var(self))
+      Block:4(
+        AssignmentStmt:5(
+          NameExpr(y* [l])
+          NameExpr(self [l]))))))
+
+[case testMemberDefinitionInInit]
+class A:
+  def __init__(self):
+    self.x = 1
+    self.y = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1))
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            y*)
+          IntExpr(2))))))
+
+[case testMemberAssignmentViaSelfOutsideInit]
+class A:
+  def f(self):
+    self.x = 1
+def __init__(self):
+  self.y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1)))))
+  FuncDef:4(
+    __init__
+    Args(
+      Var(self))
+    Block:4(
+      AssignmentStmt:5(
+        MemberExpr:5(
+          NameExpr(self [l])
+          y)
+        IntExpr(1)))))
+
+[case testMemberAssignmentNotViaSelf]
+class A:
+  def __init__(x, self):
+    self.y = 1 # not really self
+class B:
+  def __init__(x):
+    self = x
+    self.z = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(x)
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            y)
+          IntExpr(1)))))
+  ClassDef:4(
+    B
+    FuncDef:5(
+      __init__
+      Args(
+        Var(x))
+      Block:5(
+        AssignmentStmt:6(
+          NameExpr(self* [l])
+          NameExpr(x [l]))
+        AssignmentStmt:7(
+          MemberExpr:7(
+            NameExpr(self [l])
+            z)
+          IntExpr(1))))))
+
+[case testNonStandardNameForSelfAndInit]
+class A:
+  def __init__(x):
+    x.y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(x))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(x [l])
+            y*)
+          IntExpr(1))))))
+
+[case testAssignmentAfterAttributeInit]
+class A:
+  def __init__(self):
+    self.x = 1
+    self.x = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      __init__
+      Args(
+        Var(self))
+      Block:2(
+        AssignmentStmt:3(
+          MemberExpr:3(
+            NameExpr(self [l])
+            x*)
+          IntExpr(1))
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            x)
+          IntExpr(2))))))
+
+[case testOverloadedMethod]
+from typing import overload
+class A:
+  @overload
+  def f(self) -> None: self
+  @overload
+  def f(self, x: 'A') -> None: self
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  ClassDef:2(
+    A
+    OverloadedFuncDef:3(
+      Overload(def (self: __main__.A), \
+               def (self: __main__.A, x: __main__.A))
+      Decorator:3(
+        Var(f)
+        NameExpr(overload [typing.overload])
+        FuncDef:4(
+          f
+          Args(
+            Var(self))
+          def (self: __main__.A)
+          Block:4(
+            ExpressionStmt:4(
+              NameExpr(self [l])))))
+      Decorator:5(
+        Var(f)
+        NameExpr(overload [typing.overload])
+        FuncDef:6(
+          f
+          Args(
+            Var(self)
+            Var(x))
+          def (self: __main__.A, x: __main__.A)
+          Block:6(
+            ExpressionStmt:6(
+              NameExpr(self [l]))))))))
+
+[case testAttributeWithoutType]
+class A:
+    a = object
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    AssignmentStmt:2(
+      NameExpr(a* [m])
+      NameExpr(object [builtins.object]))))
+
+[case testDataAttributeRefInClassBody]
+class A:
+    x = 1
+    y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    AssignmentStmt:2(
+      NameExpr(x* [m])
+      IntExpr(1))
+    AssignmentStmt:3(
+      NameExpr(y* [m])
+      NameExpr(x [m]))))
+
+[case testMethodRefInClassBody]
+class A:
+    def f(self): pass
+    g = f
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self))
+      Block:2(
+        PassStmt:2()))
+    AssignmentStmt:3(
+      NameExpr(g* [m])
+      NameExpr(f [m]))))
+
+[case testIfStatementInClassBody]
+class A:
+    if A:
+        x = 1
+    else:
+        x = 2
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    IfStmt:2(
+      If(
+        NameExpr(A [__main__.A]))
+      Then(
+        AssignmentStmt:3(
+          NameExpr(x* [m])
+          IntExpr(1)))
+      Else(
+        AssignmentStmt:5(
+          NameExpr(x [m])
+          IntExpr(2))))))
+
+[case testForStatementInClassBody]
+class A:
+    for x in [1, 2]:
+        y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ForStmt:2(
+      NameExpr(x* [m])
+      ListExpr:2(
+        IntExpr(1)
+        IntExpr(2))
+      Block:2(
+        AssignmentStmt:3(
+          NameExpr(y* [m])
+          NameExpr(x [m]))))))
+
+[case testReferenceToClassWithinFunction]
+def f():
+    class A: pass
+    A
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ClassDef:2(
+        A
+        PassStmt:2())
+      ExpressionStmt:3(
+        NameExpr(A [l])))))
+
+[case testReferenceToClassWithinClass]
+class A:
+    class B: pass
+    B
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:2())
+    ExpressionStmt:3(
+      NameExpr(B [__main__.A.B]))))
+
+[case testClassWithBaseClassWithinClass]
+class A:
+    class B: pass
+    class C(B): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ClassDef:2(
+      B
+      PassStmt:2())
+    ClassDef:3(
+      C
+      BaseType(
+        __main__.A.B)
+      PassStmt:3())))
+
+[case testDeclarationReferenceToNestedClass]
+def f() -> None:
+    class A: pass
+    x = None # type: A
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      ClassDef:2(
+        A
+        PassStmt:2())
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        A))))
+
+[case testAccessToLocalInOuterScopeWithinNestedClass]
+def f(x):
+    class A:
+        y = x
+        def g(self):
+            z = x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      ClassDef:2(
+        A
+        AssignmentStmt:3(
+          NameExpr(y* [m])
+          NameExpr(x [l]))
+        FuncDef:4(
+          g
+          Args(
+            Var(self))
+          Block:4(
+            AssignmentStmt:5(
+              NameExpr(z* [l])
+              NameExpr(x [l]))))))))
+
+[case testQualifiedMetaclass]
+import abc
+class A(metaclass=abc.ABCMeta): pass
+[out]
+MypyFile:1(
+  Import:1(abc)
+  ClassDef:2(
+    A
+    Metaclass(abc.ABCMeta)
+    PassStmt:2()))
+
+[case testStaticMethod]
+class A:
+  @staticmethod
+  def f(z: int) -> str: pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(z))
+        def (z: builtins.int) -> builtins.str
+        Static
+        Block:3(
+          PassStmt:3())))))
+
+[case testStaticMethodWithNoArgs]
+class A:
+  @staticmethod
+  def f() -> str: pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        def () -> builtins.str
+        Static
+        Block:3(
+          PassStmt:3())))))
+
+[case testClassMethod]
+class A:
+  @classmethod
+  def f(cls, z: int) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(cls)
+          Var(z))
+        def (cls: def () -> __main__.A, z: builtins.int) -> builtins.str
+        Class
+        Block:3(
+          PassStmt:3())))))
+
+[case testClassMethodWithNoArgs]
+class A:
+  @classmethod
+  def f(cls) -> str: pass
+[builtins fixtures/classmethod.pyi]
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Decorator:2(
+      Var(f)
+      FuncDef:3(
+        f
+        Args(
+          Var(cls))
+        def (cls: def () -> __main__.A) -> builtins.str
+        Class
+        Block:3(
+          PassStmt:3())))))
+
+[case testProperty]
+import typing
+class A:
+  @property
+  def f(self) -> str: pass
+[builtins fixtures/property.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    Decorator:3(
+      Var(f)
+      FuncDef:4(
+        f
+        Args(
+          Var(self))
+        def (self: __main__.A) -> builtins.str
+        Property
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassDecorator]
+import typing
+@object
+class A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    Decorators(
+      NameExpr(object [builtins.object]))
+    PassStmt:3()))
+
+[case testClassAttributeAsMethodDefaultArgumentValue]
+import typing
+class A:
+    X = 1
+    def f(self, x : int = X) -> None: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(X* [m])
+      IntExpr(1))
+    FuncDef:4(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A, x: builtins.int =)
+      Init(
+        AssignmentStmt:4(
+          NameExpr(x [l])
+          NameExpr(X [m])))
+      Block:4(
+        PassStmt:4()))))
+
+[case testInvalidBaseClass]
+from typing import Any, Callable
+class A(None): pass
+class B(Any): pass
+class C(Callable[[], int]): pass
+[out]
+main:2: error: Invalid base class
+main:4: error: Invalid base class
+
+[case testTupleAsBaseClass]
+import m
+[file m.pyi]
+from typing import Tuple
+class A(Tuple[int, str]): pass
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  Import:1(m))
+MypyFile:1(
+  tmp/m.pyi
+  ImportFrom:1(typing, [Tuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[builtins.int, builtins.str])
+    BaseType(
+      builtins.tuple[Any])
+    PassStmt:2()))
+
+[case testBaseClassFromIgnoredModule]
+import m # type: ignore
+class B(m.A):
+   pass
+[out]
+MypyFile:1(
+  Import:1(m)
+  ClassDef:2(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.object)
+    PassStmt:3())
+  IgnoredLines(1))
+
+[case testBaseClassFromIgnoredModuleUsingImportFrom]
+from m import A # type: ignore
+class B(A, int):
+   pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [A])
+  ClassDef:2(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.int)
+    PassStmt:3())
+  IgnoredLines(1))
+
+[case testBaseClassWithExplicitAnyType]
+from typing import Any
+A = 1 # type: Any
+class B(A):
+   pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(A [__main__.A])
+    IntExpr(1)
+    Any)
+  ClassDef:3(
+    B
+    FallbackToAny
+    BaseType(
+      builtins.object)
+    PassStmt:4()))
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
new file mode 100644
index 0000000..aa45dde
--- /dev/null
+++ b/test-data/unit/semanal-errors.test
@@ -0,0 +1,1336 @@
+[case testUndefinedVariableInGlobalStatement]
+import typing
+x
+y
+[out]
+main:2: error: Name 'x' is not defined
+main:3: error: Name 'y' is not defined
+
+[case testUndefinedVariableWithinFunctionContext]
+import typing
+def f() -> None:
+  x
+y
+[out]
+main:3: error: Name 'x' is not defined
+main:4: error: Name 'y' is not defined
+
+[case testMethodScope]
+import typing
+class A:
+  def f(self): pass
+f
+[out]
+main:4: error: Name 'f' is not defined
+
+[case testMethodScope2]
+import typing
+class A:
+  def f(self): pass
+class B:
+  def g(self) -> None:
+    f # error
+    g # error
+[out]
+main:6: error: Name 'f' is not defined
+main:7: error: Name 'g' is not defined
+
+[case testInvalidType]
+import typing
+x = None # type: X
+[out]
+main:2: error: Name 'X' is not defined
+
+[case testInvalidGenericArg]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+x = 0 # type: A[y]
+[out]
+main:4: error: Name 'y' is not defined
+
+[case testInvalidNumberOfGenericArgsInTypeDecl]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A: pass
+class B(Generic[t]): pass
+x = 0 # type: B[A, A]
+y = 0 # type: A[A]
+[out]
+main:5: error: "B" expects 1 type argument, but 2 given
+main:6: error: "A" expects no type arguments, but 1 given
+
+[case testInvalidNumberOfGenericArgsInUndefinedArg]
+
+class A: pass
+x = None  # type: A[int] # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInNestedBlock]
+
+class A: pass
+class B:
+    def f(self) -> None:
+        while 1:
+            x = None  # type: A[int] \
+                # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInSignature]
+import typing
+class A: pass
+def f() -> A[int]: pass # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInOverloadedSignature]
+from typing import overload
+class A: pass
+@overload
+def f(): pass
+@overload # E: "A" expects no type arguments, but 1 given
+def f(x: A[int]) -> None: pass
+[out]
+
+[case testInvalidNumberOfGenericArgsInBaseType]
+import typing
+class A: pass
+class B(A[int]): pass # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInCast]
+from typing import cast
+class A: pass
+x = cast(A[int], 1) # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInTypeApplication]
+import typing
+class A: pass
+class B: pass
+x = A[B[int]]() # E: "B" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInNestedGenericType]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]): pass
+class B: pass
+def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInTupleType]
+from typing import Tuple
+class A: pass
+x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfGenericArgsInFunctionType]
+from typing import Callable
+class A: pass
+x = None # type: Callable[[A[int]], int]  # E: "A" expects no type arguments, but 1 given
+y = None # type: Callable[[], A[int]]  # E: "A" expects no type arguments, but 1 given
+[out]
+
+[case testVarOrFuncAsType]
+import typing
+def f(): pass
+x = 1
+y = 0 # type: f
+z = 0 # type: x
+[out]
+main:4: error: Invalid type "__main__.f"
+main:5: error: Invalid type "__main__.x"
+
+[case testGlobalVarRedefinition]
+import typing
+class A: pass
+x = 0 # type: A
+x = 0 # type: A
+[out]
+main:4: error: Name 'x' already defined
+
+[case testLocalVarRedefinition]
+import typing
+class A: pass
+def f() -> None:
+  x = 0 # type: A
+  x = 0 # type: A
+[out]
+main:5: error: Name 'x' already defined
+
+[case testClassVarRedefinition]
+import typing
+class A:
+  x = 0 # type: object
+  x = 0 # type: object
+[out]
+main:4: error: Name 'x' already defined
+
+[case testMultipleClassDefinitions]
+import typing
+class A: pass
+class A: pass
+[out]
+main:3: error: Name 'A' already defined
+
+[case testMultipleMixedDefinitions]
+import typing
+x = 1
+def x(): pass
+class x: pass
+[out]
+main:3: error: Name 'x' already defined
+main:4: error: Name 'x' already defined
+
+[case testNameNotImported]
+import typing
+from m import y
+x
+[file m.py]
+x = y = 1
+[out]
+main:3: error: Name 'x' is not defined
+
+[case testMissingNameInImportFrom]
+import typing
+from m import y
+[file m.py]
+x = 1
+[out]
+main:2: error: Module 'm' has no attribute 'y'
+
+[case testMissingModule]
+import typing
+import m
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModule2]
+import typing
+from m import x
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModule3]
+import typing
+from m import *
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModuleRelativeImport]
+import typing
+import m
+[file m/__init__.py]
+from .x import y
+[out]
+tmp/m/__init__.py:1: error: Cannot find module named 'm.x'
+tmp/m/__init__.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testMissingModuleRelativeImport2]
+import typing
+import m.a
+[file m/__init__.py]
+[file m/a.py]
+from .x import y
+[out]
+tmp/m/a.py:1: error: Cannot find module named 'm.x'
+tmp/m/a.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+
+[case testModuleNotImported]
+import typing
+import _m
+_n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+main:3: error: Name '_n' is not defined
+
+[case testImportAsteriskPlusUnderscore]
+import typing
+from _m import *
+_x
+__x__
+[file _m.py]
+_x = __x__ = 1
+[out]
+main:3: error: Name '_x' is not defined
+main:4: error: Name '__x__' is not defined
+
+[case testRelativeImportAtTopLevelModule]
+from . import m
+[out]
+main:1: error: No parent module -- cannot perform relative import
+
+[case testRelativeImportAtTopLevelModule2]
+from .. import m
+[out]
+main:1: error: No parent module -- cannot perform relative import
+
+[case testUndefinedTypeWithQualifiedName]
+import typing
+import m
+def f() -> m.c: pass
+def g() -> n.c: pass
+[file m.py]
+[out]
+main:3: error: Name 'm.c' is not defined
+main:4: error: Name 'n' is not defined
+
+[case testMissingPackage]
+import typing
+import m.n
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Cannot find module named 'm.n'
+
+[case testMissingPackage]
+import typing
+from m.n import x
+from a.b import *
+[out]
+main:2: error: Cannot find module named 'm.n'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:3: error: Cannot find module named 'a.b'
+
+[case testErrorInImportedModule]
+import m
+[file m.py]
+import typing
+x = y
+[out]
+tmp/m.py:2: error: Name 'y' is not defined
+
+[case testErrorInImportedModule2]
+import m.n
+[file m/__init__.py]
+[file m/n.py]
+import k
+[file k.py]
+import typing
+x = y
+[out]
+tmp/k.py:2: error: Name 'y' is not defined
+
+[case testPackageWithoutInitFile]
+import typing
+import m.n
+m.n.x
+[file m/n.py]
+x = 1
+[out]
+main:2: error: Cannot find module named 'm'
+main:2: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
+main:2: error: Cannot find module named 'm.n'
+
+[case testBreakOutsideLoop]
+break
+def f():
+  break
+[out]
+main:1: error: 'break' outside loop
+main:3: error: 'break' outside loop
+
+[case testContinueOutsideLoop]
+continue
+def f():
+  continue
+[out]
+main:1: error: 'continue' outside loop
+main:3: error: 'continue' outside loop
+
+[case testReturnOutsideFunction]
+def f(): pass
+return
+return 1
+[out]
+main:2: error: 'return' outside function
+main:3: error: 'return' outside function
+
+[case testYieldOutsideFunction]
+yield 1
+yield
+[out]
+main:1: error: 'yield' outside function
+main:2: error: 'yield' outside function
+
+[case testInvalidLvalues1]
+1 = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues2]
+(1) = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues3]
+(1, 1) = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues4]
+[1, 1] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues5]
+() = 1
+[out]
+main:1: error: can't assign to ()
+
+[case testInvalidLvalues6]
+x = y = z = 1  # ok
+x, (y, 1) = 1
+[out]
+main:2: error: can't assign to literal
+
+[case testInvalidLvalues7]
+x, [y, 1] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues8]
+x, [y, [z, 1]] = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues9]
+x, (y) = 1 # ok
+x, (y, (z, z)) = 1 # ok
+x, (y, (z, 1)) = 1
+[out]
+main:3: error: can't assign to literal
+
+[case testInvalidLvalues10]
+x + x = 1
+[out]
+main:1: error: can't assign to operator
+
+[case testInvalidLvalues11]
+-x = 1
+[out]
+main:1: error: can't assign to operator
+
+[case testInvalidLvalues12]
+1.1 = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues13]
+'x' = 1
+[out]
+main:1: error: can't assign to literal
+
+[case testInvalidLvalues14]
+x() = 1
+[out]
+main:1: error: can't assign to function call
+
+[case testTwoStarExpressions]
+a, *b, *c = 1
+*a, (*b, c) = 1
+a, (*b, *c) = 1
+[*a, *b] = 1
+[out]
+main:1: error: Two starred expressions in assignment
+main:3: error: Two starred expressions in assignment
+main:4: error: Two starred expressions in assignment
+
+[case testTwoStarExpressionsInForStmt]
+z = 1
+for a, *b, *c in z:
+    pass
+for *a, (*b, c) in z:
+    pass
+for a, (*b, *c) in z:
+    pass
+for [*a, *b] in z:
+    pass
+[out]
+main:2: error: Two starred expressions in assignment
+main:6: error: Two starred expressions in assignment
+main:8: error: Two starred expressions in assignment
+
+[case testTwoStarExpressionsInGeneratorExpr]
+(a for a, *b, *c in [])
+(a for *a, (*b, c) in [])
+(a for a, (*b, *c) in [])
+[out]
+main:1: error: Name 'a' is not defined
+main:1: error: Two starred expressions in assignment
+main:3: error: Two starred expressions in assignment
+
+[case testStarExpressionRhs]
+b = 1
+c = 1
+d = 1
+a = *b
+[out]
+main:4: error: Can use starred expression only as assignment target
+
+[case testStarExpressionInExp]
+a = 1
+*a + 1
+[out]
+main:2: error: Can use starred expression only as assignment target
+
+[case testInvalidDel1]
+x = 1
+del x(1)  # E: can't delete function call
+[out]
+
+[case testInvalidDel2]
+x = 1
+del x + 1 # E: can't delete operator
+[out]
+
+[case testInvalidDel3]
+del z     # E: Name 'z' is not defined
+[out]
+
+[case testFunctionTvarScope]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> t: pass
+x = 0 # type: t
+[out]
+main:4: error: Invalid type "__main__.t"
+
+[case testClassTvarScope]
+from typing import Generic, TypeVar
+t = TypeVar('t')
+class c(Generic[t]): pass
+x = 0 # type: t
+[out]
+main:4: error: Invalid type "__main__.t"
+
+[case testExpressionRefersToTypeVariable]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class c(Generic[t]):
+    def f(self) -> None: x = t
+def f(y: t): x = t
+[out]
+main:4: error: 't' is a type variable and only valid in type context
+main:5: error: 't' is a type variable and only valid in type context
+
+[case testMissingSelf]
+import typing
+class A:
+  def f(): pass
+[out]
+main:3: error: Method must have at least one argument
+
+[case testInvalidBaseClass]
+import typing
+class A(B): pass
+[out]
+main:2: error: Name 'B' is not defined
+
+[case testSuperOutsideClass]
+class A: pass
+super().x
+def f() -> None: super().y
+[out]
+main:2: error: "super" used outside class
+main:3: error: "super" used outside class
+
+[case testMissingSelfInMethod]
+import typing
+class A:
+  def f() -> None: pass
+  def g(): pass
+[out]
+main:3: error: Method must have at least one argument
+main:4: error: Method must have at least one argument
+
+[case testMultipleMethodDefinition]
+import typing
+class A:
+  def f(self) -> None: pass
+  def g(self) -> None: pass
+  def f(self, x: object) -> None: pass
+[out]
+main:5: error: Name 'f' already defined
+
+[case testInvalidGlobalDecl]
+import typing
+def f() -> None:
+    global x
+    x = None
+[out]
+main:4: error: Name 'x' is not defined
+
+[case testInvalidNonlocalDecl]
+import typing
+def f():
+    def g() -> None:
+       nonlocal x
+       x = None
+[out]
+main:4: error: No binding for nonlocal 'x' found
+main:5: error: Name 'x' is not defined
+
+[case testNonlocalDeclNotMatchingGlobal]
+import typing
+x = None
+def f() -> None:
+    nonlocal x
+    x = None
+[out]
+main:4: error: No binding for nonlocal 'x' found
+main:5: error: Name 'x' is not defined
+
+[case testNonlocalDeclConflictingWithParameter]
+import typing
+def g():
+    x = None
+    def f(x) -> None:
+        nonlocal x
+        x = None
+[out]
+main:5: error: Name 'x' is already defined in local scope before nonlocal declaration
+
+[case testNonlocalDeclOutsideFunction]
+x = 2
+nonlocal x
+[out]
+main:2: error: nonlocal declaration not allowed at module level
+
+[case testGlobalAndNonlocalDecl]
+import typing
+x = 1
+def f():
+    x = 1
+    def g() -> None:
+       global x
+       nonlocal x
+       x = None
+[out]
+main:7: error: Name 'x' is nonlocal and global
+
+[case testNonlocalAndGlobalDecl]
+import typing
+x = 1
+def f():
+    x = 1
+    def g() -> None:
+       nonlocal x
+       global x
+       x = None
+[out]
+main:7: error: Name 'x' is nonlocal and global
+
+[case testNestedFunctionAndScoping]
+import typing
+def f(x) -> None:
+    def g(y):
+        z = x
+    z
+    y
+    x
+[out]
+main:5: error: Name 'z' is not defined
+main:6: error: Name 'y' is not defined
+
+[case testMultipleNestedFunctionDef]
+import typing
+def f(x) -> None:
+    def g(): pass
+    x = 1
+    def g(): pass
+[out]
+main:5: error: Name 'g' already defined
+
+[case testRedefinedOverloadedFunction]
+from typing import overload, Any
+def f() -> None:
+    @overload
+    def p(o: object) -> None: pass # no error
+    @overload
+    def p(o: Any) -> None: pass    # no error
+    x = 1
+    def p(): pass # fail
+[out]
+main:8: error: Name 'p' already defined
+
+[case testNestedFunctionInMethod]
+import typing
+class A:
+   def f(self) -> None:
+       def g() -> None:
+           x
+       y
+[out]
+main:5: error: Name 'x' is not defined
+main:6: error: Name 'y' is not defined
+
+[case testImportScope]
+import typing
+def f() -> None:
+    import x
+x.y # E: Name 'x' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope2]
+import typing
+def f() -> None:
+    from x import y
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope3]
+import typing
+def f() -> None:
+    from x import *
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testImportScope4]
+import typing
+class A:
+    from x import *
+    y
+y # E: Name 'y' is not defined
+[file x.py]
+y = 1
+[out]
+
+[case testScopeOfNestedClass]
+import typing
+def f():
+    class A: pass
+    A
+A # E: Name 'A' is not defined
+[out]
+
+[case testScopeOfNestedClass2]
+import typing
+class A:
+    class B: pass
+B # E: Name 'B' is not defined
+[out]
+
+[case testScopeOfNestedClass3]
+import typing
+class A:
+    def f(self):
+        class B: pass
+    B # E: Name 'B' is not defined
+B # E: Name 'B' is not defined
+[out]
+
+[case testInvalidNestedClassReferenceInDecl]
+import typing
+class A: pass
+foo = 0 # type: A.x      # E: Name 'A.x' is not defined
+[out]
+
+[case testTvarScopingWithNestedClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t]):
+    class B(Generic[s]):
+        x = 0 # type: A[s]
+        y = 0 # type: A[t]        # E: Invalid type "__main__.t"
+    z = 0 # type: A[s]            # E: Invalid type "__main__.s"
+    a = 0 # type: A[t]
+[out]
+
+[case testTestExtendPrimitives]
+class C(bool): pass # E: 'bool' is not a valid base class
+class A(int): pass # ok
+class B(float): pass # ok
+class D(str): pass # ok
+[builtins fixtures/primitives.pyi]
+[out]
+
+[case testCyclicInheritance]
+class A(A): pass # E: Cycle in inheritance hierarchy
+[out]
+
+[case testAssignToTypeDef]
+import typing
+class A: pass
+A = None # E: Invalid assignment target
+[out]
+
+[case testInvalidCastTargetSyntax]
+from typing import cast, TypeVar, Generic
+t = TypeVar('t')
+class C(Generic[t]): pass
+cast(str + str, None)    # E: Cast target is not a type
+cast(C[str][str], None)  # E: Cast target is not a type
+cast(C[str + str], None) # E: Cast target is not a type
+cast([int, str], None)   # E: Invalid type
+[out]
+
+[case testInvalidCastTargetType]
+from typing import cast
+x = 0
+cast(x, None)        # E: Invalid type "__main__.x"
+cast(t, None)        # E: Name 't' is not defined
+cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined
+[out]
+
+[case testInvalidCastTargetType2]
+from typing import cast
+x = 0
+cast(str[str], None) # E: "str" expects no type arguments, but 1 given
+[out]
+
+[case testInvalidNumberOfArgsToCast]
+from typing import cast
+cast(str) # E: 'cast' expects 2 arguments
+cast(str, None, None) # E: 'cast' expects 2 arguments
+[out]
+
+[case testInvalidKindsOfArgsToCast]
+from typing import cast
+cast(str, *None) # E: 'cast' must be called with 2 positional arguments
+cast(str, target=None) # E: 'cast' must be called with 2 positional arguments
+[out]
+
+[case testInvalidAnyCall]
+from typing import Any
+Any(str, None)  # E: Any(...) is no longer supported. Use cast(Any, ...) instead
+Any(arg=str)  # E: Any(...) is no longer supported. Use cast(Any, ...) instead
+[out]
+
+[case testTypeListAsType]
+def f(x:[int, str]) -> None: # E: Invalid type
+    pass
+[out]
+
+[case testInvalidFunctionType]
+from typing import Callable
+x = None # type: Callable[int, str]
+y = None # type: Callable[int]
+z = None # type: Callable[int, int, int]
+[out]
+main:2: error: The first argument to Callable must be a list of types or "..."
+main:3: error: Invalid function type
+main:4: error: Invalid function type
+
+[case testAbstractGlobalFunction]
+import typing
+from abc import abstractmethod
+@abstractmethod
+def foo(): pass
+[out]
+main:3: error: 'abstractmethod' used with a non-method
+
+[case testAbstractNestedFunction]
+import typing
+from abc import abstractmethod
+def g() -> None:
+  @abstractmethod
+  def foo(): pass
+[out]
+main:4: error: 'abstractmethod' used with a non-method
+
+[case testInvalidTypeDeclaration]
+import typing
+def f(): pass
+f() = 1 # type: int
+[out]
+main:3: error: can't assign to function call
+
+[case testIndexedAssignmentWithTypeDeclaration]
+import typing
+None[1] = 1 # type: int
+[out]
+main:2: error: Unexpected type declaration
+
+[case testNonSelfMemberAssignmentWithTypeDeclaration]
+import typing
+None.x = 1 # type: int
+[out]
+main:2: error: Type cannot be declared in assignment to non-self attribute
+
+[case testNonSelfMemberAssignmentWithTypeDeclarationInMethod]
+import typing
+class A:
+  def f(self, x) -> None:
+    x.y = 1 # type: int
+[out]
+main:4: error: Type cannot be declared in assignment to non-self attribute
+
+[case testInvalidTypeInTypeApplication]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A[TypeVar] # E: Invalid type "typing.TypeVar"
+[out]
+
+[case testInvalidTypeInTypeApplication2]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A[1] # E: Type expected within [...]
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypes]
+x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested]
+x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested2]
+x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested3]
+x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested4]
+x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items
+[out]
+
+[case testVariableDeclWithInvalidNumberOfTypesNested5]
+x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables
+[out]
+
+[case testVariableDeclWithInvalidType]
+x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables
+[out]
+
+[case testInvalidLvalueWithExplicitType]
+a = 1
+a() = None # type: int  # E: can't assign to function call
+[out]
+
+[case testInvalidLvalueWithExplicitType2]
+a = 1
+a[1] = None # type: int # E: Unexpected type declaration
+a.x = None # type: int \
+    # E: Type cannot be declared in assignment to non-self attribute
+[out]
+
+[case testInvalidLvalueWithExplicitType3]
+a = 1
+a.y, a.x = None, None # type: int, int \
+    # E: Type cannot be declared in assignment to non-self attribute
+a[1], a[2] = None, None # type: int, int \
+    # E: Unexpected type declaration
+[out]
+
+[case testMissingGenericImport]
+from typing import TypeVar
+T = TypeVar('T')
+class A(Generic[T]): pass
+[out]
+main:3: error: Name 'Generic' is not defined
+
+[case testInvalidTypeWithinGeneric]
+from typing import Generic
+class A(Generic[int]): pass # E: Free type variable expected in Generic[...]
+[out]
+
+[case testInvalidTypeWithinNestedGenericClass]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class A(Generic[T]):
+    class B(Generic[T]): pass \
+          # E: Free type variable expected in Generic[...]
+[out]
+
+[case testIncludingGenericTwiceInBaseClassList]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+S = TypeVar('S')
+class A(Generic[T], Generic[S]): pass \
+      # E: Duplicate Generic in bases
+[out]
+
+[case testInvalidMetaclass]
+class A(metaclass=x): pass # E: Name 'x' is not defined
+[out]
+
+[case testInvalidQualifiedMetaclass]
+import abc
+class A(metaclass=abc.Foo): pass # E: Name 'abc.Foo' is not defined
+[out]
+
+[case testNonClassMetaclass]
+def f(): pass
+class A(metaclass=f): pass # E: Invalid metaclass 'f'
+[out]
+
+[case testInvalidTypevarArguments]
+from typing import TypeVar
+a = TypeVar()       # E: Too few arguments for TypeVar()
+b = TypeVar(x='b')  # E: TypeVar() expects a string literal as first argument
+c = TypeVar(1)      # E: TypeVar() expects a string literal as first argument
+d = TypeVar('D')    # E: String argument 1 'D' to TypeVar(...) does not match variable name 'd'
+e = TypeVar('e', int, str, x=1) # E: Unexpected argument to TypeVar(): x
+f = TypeVar('f', (int, str)) # E: Type expected
+g = TypeVar('g', x=(int, str)) # E: Unexpected argument to TypeVar(): x
+h = TypeVar('h', bound=1) # E: TypeVar 'bound' must be a type
+[out]
+
+[case testMoreInvalidTypevarArguments]
+from typing import TypeVar
+T = TypeVar('T', int, str, bound=bool) # E: TypeVar cannot have both values and an upper bound
+S = TypeVar('S', covariant=True, contravariant=True) \
+    # E: TypeVar cannot be both covariant and contravariant
+[builtins fixtures/bool.pyi]
+
+[case testInvalidTypevarValues]
+from typing import TypeVar
+b = TypeVar('b', *[int]) # E: Unexpected argument to TypeVar()
+c = TypeVar('c', int, 2) # E: Type expected
+[out]
+
+[case testObsoleteTypevarValuesSyntax]
+from typing import TypeVar
+a = TypeVar('a', values=(int, str))
+[out]
+main:2: error: TypeVar 'values' argument not supported
+main:2: error: Use TypeVar('T', t, ...) instead of TypeVar('T', values=(t, ...))
+
+[case testLocalTypevarScope]
+from typing import TypeVar
+def f() -> None:
+    T = TypeVar('T')
+def g(x: T) -> None: pass # E: Name 'T' is not defined
+[out]
+
+[case testClassTypevarScope]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+def g(x: T) -> None: pass # E: Name 'T' is not defined
+[out]
+
+[case testRedefineVariableAsTypevar]
+from typing import TypeVar
+x = 0
+x = TypeVar('x') # E: Cannot redefine 'x' as a type variable
+[out]
+
+[case testTypevarWithType]
+from typing import TypeVar
+x = TypeVar('x') # type: int # E: Cannot declare the type of a type variable
+[out]
+
+[case testRedefineTypevar]
+from typing import TypeVar
+t = TypeVar('t')
+t = 1 # E: Invalid assignment target
+[out]
+
+[case testRedefineTypevar2]
+from typing import TypeVar
+t = TypeVar('t')
+def t(): pass # E: Name 't' already defined
+[out]
+
+[case testRedefineTypevar3]
+from typing import TypeVar
+t = TypeVar('t')
+class t: pass # E: Name 't' already defined
+[out]
+
+[case testRedefineTypevar4]
+from typing import TypeVar
+t = TypeVar('t')
+from typing import Generic as t # E: Name 't' already defined
+[out]
+
+[case testInvalidStrLiteralType]
+def f(x: 'foo'): pass # E: Name 'foo' is not defined
+[out]
+
+[case testInvalidStrLiteralType2]
+def f(x: 'int['): pass # E: syntax error in type comment
+[out]
+
+[case testInconsistentOverload]
+from typing import overload
+def dec(x): pass
+ at overload
+def f(): pass
+ at dec  # E: 'overload' decorator expected
+def f(): pass
+[out]
+
+[case testInconsistentOverload2]
+from typing import overload
+def dec(x): pass
+ at dec  # E: 'overload' decorator expected
+def f(): pass
+ at overload
+def f(): pass
+[out]
+
+[case testMissingOverloadDecorator]
+from typing import overload
+def dec(x): pass
+ at dec  # E: 'overload' decorator expected
+def f(): pass
+ at dec  # E: 'overload' decorator expected
+def f(): pass
+[out]
+
+[case testIncompatibleSignatureInComment]
+import typing
+def f(): # type: (int) -> int
+  pass
+def g(x): # type: () -> int
+  pass
+[out]
+main:2: error: Type signature has too many arguments
+main:4: error: Type signature has too few arguments
+
+[case testStaticmethodAndNonMethod]
+import typing
+ at staticmethod
+def f(): pass
+class A:
+  def g(self) -> None:
+    @staticmethod
+    def h(): pass
+[builtins fixtures/staticmethod.pyi]
+[out]
+main:2: error: 'staticmethod' used with a non-method
+main:6: error: 'staticmethod' used with a non-method
+
+[case testClassmethodAndNonMethod]
+import typing
+ at classmethod
+def f(): pass
+class A:
+  def g(self) -> None:
+    @classmethod
+    def h(): pass
+[builtins fixtures/classmethod.pyi]
+[out]
+main:2: error: 'classmethod' used with a non-method
+main:6: error: 'classmethod' used with a non-method
+
+[case testNonMethodProperty]
+import typing
+ at property  # E: 'property' used with a non-method
+def f() -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testInvalidArgCountForProperty]
+import typing
+class A:
+    @property
+    def f(self, x) -> int: pass  # E: Too many arguments
+    @property
+    def g() -> int: pass   # E: Method must have at least one argument
+[builtins fixtures/property.pyi]
+[out]
+
+[case testOverloadedProperty]
+from typing import overload
+class A:
+    @overload  # E: Decorated property not supported
+    @property
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @overload
+    def f(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testOverloadedProperty2]
+from typing import overload
+class A:
+    @overload
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @overload
+    def f(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testDecoratedProperty]
+import typing
+def dec(f): pass
+class A:
+    @dec  # E: Decorated property not supported
+    @property
+    def f(self) -> int: pass
+    @property  # E: Decorated property not supported
+    @dec
+    def g(self) -> int: pass
+[builtins fixtures/property.pyi]
+[out]
+
+[case testImportTwoModulesWithSameNameInFunction]
+import typing
+def f() -> None:
+    import x
+    import y as x # E: Name 'x' already defined
+    x.y
+[file x.py]
+y = 1
+[file y.py]
+[out]
+
+[case testImportTwoModulesWithSameNameInGlobalContext]
+import typing
+import x
+import y as x # E: Name 'x' already defined
+x.y
+[file x.py]
+y = 1
+[file y.py]
+[out]
+
+[case testListTypeAliasWithoutImport]
+import typing
+def f() -> List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+main:2: error: Name 'List' is not defined
+
+[case testImportObsoleteTypingFunction]
+from typing import Function # E: Module 'typing' has no attribute 'Function' (it's now called 'typing.Callable')
+from _m import Function # E: Module '_m' has no attribute 'Function'
+[file _m.py]
+[out]
+
+[case testTypeRefersToObsoleteTypingFunction]
+import typing
+import _m
+def f(x: typing.Function[[], None]) -> None: pass
+def g(x: _m.Function[[], None]) -> None: pass
+[file _m.py]
+[out]
+main:3: error: Name 'typing.Function' is not defined (it's now called 'typing.Callable')
+--'
+main:4: error: Name '_m.Function' is not defined
+
+[case testUnqualifiedNameRefersToObsoleteTypingFunction]
+x = None # type: Function[[], None]
+[out]
+main:1: error: Name 'Function' is not defined
+main:1: note: (Did you mean 'typing.Callable'?)
+
+[case testInvalidWithTarget]
+def f(): pass
+with f() as 1: pass  # E: can't assign to literal
+[out]
+
+[case testUseObsoleteNameForTypeVar]
+from typing import typevar
+t = typevar('t')
+[out]
+main:1: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
+--' (this fixes syntax highlighting)
+
+[case testUseObsoleteNameForTypeVar2]
+t = typevar('t')
+[out]
+main:1: error: Name 'typevar' is not defined
+main:1: note: (Did you mean 'typing.TypeVar'?)
+
+[case testUseObsoleteNameForTypeVar3]
+import typing
+t = typing.typevar('t')
+[out]
+main:2: error: Module 'typing' has no attribute 'typevar' (it's now called 'typing.TypeVar')
+--' (work around syntax highlighting :-/)
+
+[case testInvalidTypeAnnotation]
+import typing
+def f() -> None:
+    1[2] = 1  # type: int
+[out]
+main:3: error: Unexpected type declaration
+
+[case testInvalidTypeAnnotation2]
+import typing
+def f() -> None:
+    f() = 1  # type: int
+[out]
+main:3: error: can't assign to function call
+
+[case testInvalidReferenceToAttributeOfOuterClass]
+class A:
+    class X: pass
+    class B:
+        y = X  # E: Name 'X' is not defined
+[out]
+
+[case testStubPackage]
+from m import x
+from m import y # E: Module 'm' has no attribute 'y'
+[file m/__init__.pyi]
+x = 1
+[out]
+
+[case testStubPackageSubModule]
+from m import x
+from m import y # E: Module 'm' has no attribute 'y'
+from m.m2 import y
+from m.m2 import z # E: Module 'm.m2' has no attribute 'z'
+[file m/__init__.pyi]
+x = 1
+[file m/m2.pyi]
+y = 1
+[out]
+
+[case testMissingStubForThirdPartyModule]
+import nosexcover
+[out]
+main:1: error: No library stub file for module 'nosexcover'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testMissingStubForStdLibModule]
+import tabnanny
+[out]
+main:1: error: No library stub file for standard library module 'tabnanny'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+
+[case testMissingStubForTwoModules]
+import tabnanny
+import xdrlib
+[out]
+main:1: error: No library stub file for standard library module 'tabnanny'
+main:1: note: (Stub files are from https://github.com/python/typeshed)
+main:2: error: No library stub file for standard library module 'xdrlib'
+
+[case testListComprehensionSpecialScoping]
+class A:
+    x = 1
+    y = 1
+    z = 1
+    [x for i in z if y]
+[out]
+main:5: error: Name 'x' is not defined
+main:5: error: Name 'y' is not defined
+
+[case testTypeRedeclarationNoSpuriousWarnings]
+from typing import Tuple
+a = 1  # type: int
+a = 's'  # type: str
+a = ('spam', 'spam', 'eggs', 'spam')  # type: Tuple[str]
+
+[out]
+main:3: error: Name 'a' already defined
+main:4: error: Name 'a' already defined
diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test
new file mode 100644
index 0000000..cec2a3c
--- /dev/null
+++ b/test-data/unit/semanal-expressions.test
@@ -0,0 +1,395 @@
+[case testLiterals]
+(1, 'x', 1.1, 1.1j)
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    TupleExpr:1(
+      IntExpr(1)
+      StrExpr(x)
+      FloatExpr(1.1)
+      ComplexExpr(1.1j))))
+
+[case testMemberExpr]
+x = 1
+x.y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(x [__main__.x])
+      y)))
+
+[case testIndexExpr]
+x = y = 1
+x[y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testBinaryOperations]
+x = y = 1
+x + y
+x | y
+x is not y
+x == y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    OpExpr:2(
+      +
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:3(
+    OpExpr:3(
+      |
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:4(
+    ComparisonExpr:4(
+      is not
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])))
+  ExpressionStmt:5(
+    ComparisonExpr:5(
+      ==
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testUnaryOperations]
+x = 1
+-x
+~x
++x
+not x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  ExpressionStmt:2(
+    UnaryExpr:2(
+      -
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:3(
+    UnaryExpr:3(
+      ~
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:4(
+    UnaryExpr:4(
+      +
+      NameExpr(x [__main__.x])))
+  ExpressionStmt:5(
+    UnaryExpr:5(
+      not
+      NameExpr(x [__main__.x]))))
+
+[case testSlices]
+x = y = z = 1
+x[y:z:x]
+x[:]
+x[:y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y])
+      NameExpr(z* [__main__.z]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        NameExpr(y [__main__.y])
+        NameExpr(z [__main__.z])
+        NameExpr(x [__main__.x]))))
+  ExpressionStmt:3(
+    IndexExpr:3(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        <empty>
+        <empty>)))
+  ExpressionStmt:4(
+    IndexExpr:4(
+      NameExpr(x [__main__.x])
+      SliceExpr:-1(
+        <empty>
+        NameExpr(y [__main__.y])))))
+
+[case testTupleLiteral]
+x = y = 1
+x, y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testListLiteral]
+x = y = 1
+([], [x, y])
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    TupleExpr:2(
+      ListExpr:2()
+      ListExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(y [__main__.y])))))
+
+[case testDictLiterals]
+x = y = 1
+{ x : y, y : x }
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  ExpressionStmt:2(
+    DictExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y])
+      NameExpr(y [__main__.y])
+      NameExpr(x [__main__.x]))))
+
+[case testListComprehension]
+a = 0
+([x + 1 for x in a])
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    ListComprehension:2(
+      GeneratorExpr:2(
+        OpExpr:2(
+          +
+          NameExpr(x [l])
+          IntExpr(1))
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])))))
+
+[case testListComprehensionInFunction]
+def f(a) -> None:
+    [x for x in a]
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(a))
+    def (a: Any)
+    Block:1(
+      ExpressionStmt:2(
+        ListComprehension:2(
+          GeneratorExpr:2(
+            NameExpr(x [l])
+            NameExpr(x* [l])
+            NameExpr(a [l])))))))
+
+[case testListComprehensionWithCondition]
+a = 0
+a = [x for x in a if x]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    ListComprehension:2(
+      GeneratorExpr:2(
+        NameExpr(x [l])
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])
+        NameExpr(x [l])))))
+
+[case testSetComprehension]
+a = 0
+({x + 1 for x in a})
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    SetComprehension:2(
+      GeneratorExpr:2(
+        OpExpr:2(
+          +
+          NameExpr(x [l])
+          IntExpr(1))
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])))))
+
+[case testSetComprehensionWithCondition]
+a = 0
+a = {x for x in a if x}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    SetComprehension:2(
+      GeneratorExpr:2(
+        NameExpr(x [l])
+        NameExpr(x* [l])
+        NameExpr(a [__main__.a])
+        NameExpr(x [l])))))
+
+[case testDictionaryComprehension]
+a = 0
+({x: x + 1 for x in a})
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    DictionaryComprehension:2(
+      NameExpr(x [l])
+      OpExpr:2(
+        +
+        NameExpr(x [l])
+        IntExpr(1))
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a]))))
+
+[case testDictionaryComprehensionWithCondition]
+a = 0
+a = {x: x + 1 for x in a if x}
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    DictionaryComprehension:2(
+      NameExpr(x [l])
+      OpExpr:2(
+        +
+        NameExpr(x [l])
+        IntExpr(1))
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a])
+      NameExpr(x [l]))))
+
+[case testGeneratorExpression]
+a = 0
+(x for x in a)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x [l])
+      NameExpr(x* [l])
+      NameExpr(a [__main__.a]))))
+
+[case testGeneratorExpressionNestedIndex]
+a = 0
+(x for x, (y, z) in a)
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(a* [__main__.a])
+    IntExpr(0))
+  ExpressionStmt:2(
+    GeneratorExpr:2(
+      NameExpr(x [l])
+      TupleExpr:2(
+        NameExpr(x* [l])
+        TupleExpr:2(
+          NameExpr(y* [l])
+          NameExpr(z* [l])))
+      NameExpr(a [__main__.a]))))
+
+[case testLambda]
+x = 0
+lambda: x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(0))
+  ExpressionStmt:2(
+    FuncExpr:2(
+      Block:2(
+        ReturnStmt:2(
+          NameExpr(x [__main__.x]))))))
+
+[case testLambdaWithArguments]
+lambda x, y: x + y
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    FuncExpr:1(
+      Args(
+        Var(x)
+        Var(y))
+      Block:1(
+        ReturnStmt:1(
+          OpExpr:1(
+            +
+            NameExpr(x [l])
+            NameExpr(y [l])))))))
+
+[case testConditionalExpression]
+int if None else str
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    ConditionalExpr:1(
+      Condition(
+        NameExpr(None [builtins.None]))
+      NameExpr(int [builtins.int])
+      NameExpr(str [builtins.str]))))
+
+[case testDictWithKeywordArgs]
+dict(a=1, b=str())
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    DictExpr:1(
+      StrExpr(a)
+      IntExpr(1)
+      StrExpr(b)
+      CallExpr:1(
+        NameExpr(str [builtins.str])
+        Args()))))
diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test
new file mode 100644
index 0000000..7a00e66
--- /dev/null
+++ b/test-data/unit/semanal-modules.test
@@ -0,0 +1,877 @@
+-- NOTE: If a module has a name starting or ending with _, it is skipped in
+--       output.
+
+[case testImport]
+import x
+x.y
+[file x.py]
+y = 1
+[out]
+MypyFile:1(
+  Import:1(x)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(x)
+      y [x.y])))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [x.y])
+    IntExpr(1)))
+
+[case testImportedNameInType]
+import m
+x = None # type: m.c
+[file m.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m.c))
+MypyFile:1(
+  tmp/m.py
+  ClassDef:1(
+    c
+    PassStmt:1()))
+
+[case testImportFrom]
+from m import y
+x = y
+[file m.py]
+y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [y])
+  AssignmentStmt:2(
+    NameExpr(x* [__main__.x])
+    NameExpr(y [m.y])))
+MypyFile:1(
+  tmp/m.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.y])
+    IntExpr(1)))
+
+[case testImportFromType]
+from m import c
+x = None # type: c
+[file m.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m.c))
+MypyFile:1(
+  tmp/m.py
+  ClassDef:1(
+    c
+    PassStmt:1()))
+
+[case testImportMultiple]
+import _m, _n
+_m.x, _n.y
+[file _m.py]
+x = 1
+[file _n.py]
+y = 2
+[out]
+MypyFile:1(
+  Import:1(_m, _n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        x [_m.x])
+      MemberExpr:2(
+        NameExpr(_n)
+        y [_n.y]))))
+
+[case testImportAs]
+import _m as n
+n.x
+[file _m.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m : n)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(n [_m])
+      x [_m.x])))
+
+[case testImportFromMultiple]
+from _m import x, y
+x, y
+[file _m.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [x, y])
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [_m.x])
+      NameExpr(y [_m.y]))))
+
+[case testImportFromAs]
+from _m import y as z
+z
+[file _m.py]
+y = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [y : z])
+  ExpressionStmt:2(
+    NameExpr(z [_m.y])))
+
+[case testAccessImportedName]
+from m import x
+y = x
+[file m.py]
+from _n import x
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [x])
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [_n.x])))
+MypyFile:1(
+  tmp/m.py
+  ImportFrom:1(_n, [x]))
+
+[case testAccessImportedName2]
+import _m
+y = _m.x
+[file _m.py]
+from _n import x
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_n.x])))
+
+[case testAccessingImportedNameInType]
+from _m import c
+x = None # type: c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessingImportedNameInType2]
+import _m
+x = None # type: _m.c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessingImportedModule]
+from _m import _n
+_n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [_n])
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_n)
+      x [_n.x])))
+
+[case testAccessingImportedModule2]
+import _m
+_m._n.x
+[file _m.py]
+import _n
+[file _n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        _n)
+      x [_n.x])))
+
+[case testAccessTypeViaDoubleIndirection]
+from _m import c
+a = None # type: c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(_m, [c])
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testAccessTypeViaDoubleIndirection2]
+import _m
+a = None # type: _m.c
+[file _m.py]
+from _n import c
+[file _n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    _n.c))
+
+[case testImportAsterisk]
+from _m import *
+x, y
+[file _m.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [_m.x])
+      NameExpr(y [_m.y]))))
+
+[case testImportAsteriskAndImportedNames]
+from _m import *
+n_.x, y
+[file _m.py]
+import n_
+from n_ import y
+[file n_.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        NameExpr(n_)
+        x [n_.x])
+      NameExpr(y [n_.y]))))
+
+[case testImportAsteriskAndImportedNamesInTypes]
+from _m import *
+x = None # type: n_.c
+y = None # type: d
+[file _m.py]
+import n_
+from n_ import d
+[file n_.py]
+class c: pass
+class d: pass
+[out]
+MypyFile:1(
+  ImportAll:1(_m)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    n_.c)
+  AssignmentStmt:3(
+    NameExpr(y [__main__.y])
+    NameExpr(None [builtins.None])
+    n_.d))
+
+[case testModuleInSubdir]
+import _m
+_m.x
+[file _m/__init__.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(_m)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_m.x])))
+
+[case testNestedModules]
+import m.n
+m.n.x, m.y
+[file m/__init__.py]
+y = 1
+[file m/n.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m.n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          n [m.n])
+        x [m.n.x])
+      MemberExpr:2(
+        NameExpr(m)
+        y [m.y]))))
+MypyFile:1(
+  tmp/m/n.py
+  AssignmentStmt:1(
+    NameExpr(x* [m.n.x])
+    IntExpr(1)))
+
+[case testImportFromSubmodule]
+from m._n import x
+x
+[file m/__init__.py]
+[file m/_n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m._n, [x])
+  ExpressionStmt:2(
+    NameExpr(x [m._n.x])))
+
+[case testImportAllFromSubmodule]
+from m._n import *
+x, y
+[file m/__init__.py]
+[file m/_n.py]
+x = y = 1
+[out]
+MypyFile:1(
+  ImportAll:1(m._n)
+  ExpressionStmt:2(
+    TupleExpr:2(
+      NameExpr(x [m._n.x])
+      NameExpr(y [m._n.y]))))
+
+[case testSubmodulesAndTypes]
+import m._n
+x = None # type: m._n.c
+[file m/__init__.py]
+[file m/_n.py]
+class c: pass
+[out]
+MypyFile:1(
+  Import:1(m._n)
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m._n.c))
+
+[case testSubmodulesAndTypes2]
+from m._n import c
+x = None # type: c
+[file m/__init__.py]
+[file m/_n.py]
+class c: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(m._n, [c])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    m._n.c))
+
+[case testFromPackageImportModule]
+from m import _n
+_n.x
+[file m/__init__.py]
+[file m/_n.py]
+x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(m, [_n])
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(_n [m._n])
+      x [m._n.x])))
+
+[case testDeeplyNestedModule]
+import m.n.k
+m.n.k.x
+m.n.b
+m.a
+[file m/__init__.py]
+a = 1
+[file m/n/__init__.py]
+b = 1
+[file m/n/k.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m.n.k)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          n [m.n])
+        k [m.n.k])
+      x [m.n.k.x]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      MemberExpr:3(
+        NameExpr(m)
+        n [m.n])
+      b [m.n.b]))
+  ExpressionStmt:4(
+    MemberExpr:4(
+      NameExpr(m)
+      a [m.a])))
+MypyFile:1(
+  tmp/m/n/k.py
+  AssignmentStmt:1(
+    NameExpr(x* [m.n.k.x])
+    IntExpr(1)))
+
+[case testImportInSubmodule]
+import m._n
+y = m._n.x
+[file m/__init__.py]
+[file m/_n.py]
+from m._k import x
+[file m/_k.py]
+x = 1
+[out]
+MypyFile:1(
+  Import:1(m._n)
+  AssignmentStmt:2(
+    NameExpr(y* [__main__.y])
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(m)
+        _n [m._n])
+      x [m._k.x])))
+
+[case testBuiltinsUsingModule]
+o = None # type: __builtins__.object
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(o [__main__.o])
+    NameExpr(None [builtins.None])
+    builtins.object))
+
+[case testImplicitAccessToBuiltins]
+object
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    NameExpr(object [builtins.object])))
+
+[case testAssignmentToModuleAttribute]
+import _m
+_m.x = (
+  _m.x)
+[file _m.py]
+x = None
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    MemberExpr:2(
+      NameExpr(_m)
+      x [_m.x])
+    MemberExpr:3(
+      NameExpr(_m)
+      x [_m.x])))
+
+[case testAssignmentThatRefersToModule]
+import _m
+_m.x[None] = None
+[file _m.py]
+x = None
+[out]
+MypyFile:1(
+  Import:1(_m)
+  AssignmentStmt:2(
+    IndexExpr:2(
+      MemberExpr:2(
+        NameExpr(_m)
+        x [_m.x])
+      NameExpr(None [builtins.None]))
+    NameExpr(None [builtins.None])))
+
+[case testImportInBlock]
+if 1:
+    import _x
+    _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      IntExpr(1))
+    Then(
+      Import:2(_x)
+      ExpressionStmt:3(
+        MemberExpr:3(
+          NameExpr(_x)
+          y [_x.y])))))
+
+[case testImportInFunction]
+def f() -> None:
+    import _x
+    _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      Import:2(_x)
+      ExpressionStmt:3(
+        MemberExpr:3(
+          NameExpr(_x)
+          y [_x.y])))))
+
+[case testImportInClassBody]
+class A:
+    from _x import y
+    z = y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    ImportFrom:2(_x, [y])
+    AssignmentStmt:3(
+      NameExpr(z* [m])
+      NameExpr(y [_x.y]))))
+
+[case testImportInClassBody2]
+class A:
+    import _x
+    z = _x.y
+[file _x.py]
+y = 1
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    Import:2(_x)
+    AssignmentStmt:3(
+      NameExpr(z* [m])
+      MemberExpr:3(
+        NameExpr(_x)
+        y [_x.y]))))
+
+[case testImportModuleTwice]
+def f() -> None:
+    import x
+    import x
+    x.y
+[file x.py]
+y = 1
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      Import:2(x)
+      Import:3(x)
+      ExpressionStmt:4(
+        MemberExpr:4(
+          NameExpr(x)
+          y [x.y])))))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [x.y])
+    IntExpr(1)))
+
+[case testRelativeImport0]
+import m.x
+m.x.z.y
+[file m/__init__.py]
+[file m/x.py]
+from . import z
+[file m/z.py]
+y = 1
+[out]
+MypyFile:1(
+  Import:1(m.x)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        MemberExpr:2(
+          NameExpr(m)
+          x [m.x])
+        z [m.z])
+      y [m.z.y])))
+MypyFile:1(
+  tmp/m/x.py
+  ImportFrom:1(., [z]))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(1)))
+
+[case testRelativeImport1]
+import m.t.b as b
+b.x.y
+b.z.y
+[file m/__init__.py]
+[file m/x.py]
+y = 1
+[file m/z.py]
+y = 3
+[file m/t/__init__.py]
+[file m/t/b.py]
+from .. import x, z
+[out]
+MypyFile:1(
+  Import:1(m.t.b : b)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      MemberExpr:2(
+        NameExpr(b [m.t.b])
+        x [m.x])
+      y [m.x.y]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      MemberExpr:3(
+        NameExpr(b [m.t.b])
+        z [m.z])
+      y [m.z.y])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(.., [x, z]))
+MypyFile:1(
+  tmp/m/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.x.y])
+    IntExpr(1)))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(3)))
+
+[case testRelativeImport2]
+import m.t.b as b
+b.xy
+b.zy
+[file m/__init__.py]
+[file m/x.py]
+y = 1
+[file m/z.py]
+y = 3
+[file m/t/__init__.py]
+[file m/t/b.py]
+from ..x import y as xy
+from ..z import y as zy
+[out]
+MypyFile:1(
+  Import:1(m.t.b : b)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(b [m.t.b])
+      xy [m.x.y]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      NameExpr(b [m.t.b])
+      zy [m.z.y])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(..x, [y : xy])
+  ImportFrom:2(..z, [y : zy]))
+MypyFile:1(
+  tmp/m/x.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.x.y])
+    IntExpr(1)))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(y* [m.z.y])
+    IntExpr(3)))
+
+[case testRelativeImport3]
+import m.t
+m.zy
+m.xy
+m.t.y
+[file m/__init__.py]
+from .x import *
+from .z import *
+[file m/x.py]
+from .z import zy as xy
+[file m/z.py]
+zy = 3
+[file m/t/__init__.py]
+from .b import *
+[file m/t/b.py]
+from .. import xy as y
+[out]
+MypyFile:1(
+  Import:1(m.t)
+  ExpressionStmt:2(
+    MemberExpr:2(
+      NameExpr(m)
+      zy [m.z.zy]))
+  ExpressionStmt:3(
+    MemberExpr:3(
+      NameExpr(m)
+      xy [m.z.zy]))
+  ExpressionStmt:4(
+    MemberExpr:4(
+      MemberExpr:4(
+        NameExpr(m)
+        t [m.t])
+      y [m.z.zy])))
+MypyFile:1(
+  tmp/m/t/b.py
+  ImportFrom:1(.., [xy : y]))
+MypyFile:1(
+  tmp/m/x.py
+  ImportFrom:1(.z, [zy : xy]))
+MypyFile:1(
+  tmp/m/z.py
+  AssignmentStmt:1(
+    NameExpr(zy* [m.z.zy])
+    IntExpr(3)))
+
+[case testRelativeImportFromSameModule]
+import m.x
+[file m/__init__.py]
+[file m/x.py]
+from .x import nonexistent
+[out]
+tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
+
+[case testImportFromSameModule]
+import m.x
+[file m/__init__.py]
+[file m/x.py]
+from m.x import nonexistent
+[out]
+tmp/m/x.py:1: error: Module 'm.x' has no attribute 'nonexistent'
+
+[case testFromImportAsInStub]
+from m import *
+x
+y  # E: Name 'y' is not defined
+[file m.pyi]
+from m2 import x as x
+from m2 import y
+[file m2.py]
+x = 1
+y = 2
+[out]
+
+[case testFromImportAsInNonStub]
+from m_ import *
+x
+y
+[file m_.py]
+from m2_ import x as x
+from m2_ import y
+[file m2_.py]
+x = 1
+y = 2
+[out]
+MypyFile:1(
+  ImportAll:1(m_)
+  ExpressionStmt:2(
+    NameExpr(x [m2_.x]))
+  ExpressionStmt:3(
+    NameExpr(y [m2_.y])))
+
+[case testImportAsInStub]
+from m import *
+m2
+m3  # E: Name 'm3' is not defined
+[file m.pyi]
+import m2 as m2
+import m3
+[file m2.py]
+[file m3.py]
+[out]
+
+[case testImportAsInNonStub]
+from m_ import *
+m2_
+m3_
+[file m_.py]
+import m2_ as m2_
+import m3_
+[file m2_.py]
+[file m3_.py]
+[out]
+MypyFile:1(
+  ImportAll:1(m_)
+  ExpressionStmt:2(
+    NameExpr(m2_))
+  ExpressionStmt:3(
+    NameExpr(m3_)))
+
+[case testErrorsInMultipleModules]
+import m
+x
+[file m.py]
+y
+[out]
+tmp/m.py:1: error: Name 'y' is not defined
+main:2: error: Name 'x' is not defined
+
+[case testImportTwice]
+import typing
+from x import a, a # ok (we could give a warning, but this is valid)
+def f() -> None:
+    from x import a
+    from x import a # ok
+import x
+import x # ok, since we may import multiple submodules of a package
+[file x.py]
+a = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(x, [a, a])
+  FuncDef:3(
+    f
+    def ()
+    Block:3(
+      ImportFrom:4(x, [a])
+      ImportFrom:5(x, [a])))
+  Import:6(x)
+  Import:7(x))
+MypyFile:1(
+  tmp/x.py
+  AssignmentStmt:1(
+    NameExpr(a* [x.a])
+    IntExpr(1)))
diff --git a/test-data/unit/semanal-namedtuple.test b/test-data/unit/semanal-namedtuple.test
new file mode 100644
index 0000000..a820a07
--- /dev/null
+++ b/test-data/unit/semanal-namedtuple.test
@@ -0,0 +1,177 @@
+-- Semantic analysis of named tuples
+
+[case testSimpleNamedtuple]
+from collections import namedtuple
+N = namedtuple('N', ['a'])
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtuple]
+from collections import namedtuple
+N = namedtuple('N', ['a', 'xyz'])
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtupleWithTupleFieldNames]
+from collections import namedtuple
+N = namedtuple('N', ('a', 'xyz'))
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testTwoItemNamedtupleWithShorthandSyntax]
+from collections import namedtuple
+N = namedtuple('N', ' a  xyz ')
+def f() -> N: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any, Any]))
+  FuncDef:3(
+    f
+    def () -> Tuple[Any, Any, fallback=__main__.N]
+    Block:3(
+      PassStmt:3())))
+
+[case testNamedTupleWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', [('a', int),
+                     ('b', str)])
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
+
+[case testNamedTupleWithTupleFieldNamesWithItemTypes]
+from typing import NamedTuple
+N = NamedTuple('N', (('a', int),
+                     ('b', str)))
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[builtins.int, builtins.str])))
+
+[case testNamedTupleBaseClass]
+from collections import namedtuple
+N = namedtuple('N', ['x'])
+class A(N): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  AssignmentStmt:2(
+    NameExpr(N* [__main__.N])
+    NamedTupleExpr:2(N, Tuple[Any]))
+  ClassDef:3(
+    A
+    TupleType(
+      Tuple[Any, fallback=__main__.N])
+    BaseType(
+      __main__.N)
+    PassStmt:3()))
+
+[case testNamedTupleBaseClass2]
+from collections import namedtuple
+class A(namedtuple('N', ['x'])): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(collections, [namedtuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[Any, fallback=__main__.N at 2])
+    BaseType(
+      __main__.N at 2)
+    PassStmt:2()))
+
+[case testNamedTupleBaseClassWithItemTypes]
+from typing import NamedTuple
+class A(NamedTuple('N', [('x', int)])): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [NamedTuple])
+  ClassDef:2(
+    A
+    TupleType(
+      Tuple[builtins.int, fallback=__main__.N at 2])
+    BaseType(
+      __main__.N at 2)
+    PassStmt:2()))
+
+-- Errors
+
+[case testNamedTupleWithTooFewArguments]
+from collections import namedtuple
+N = namedtuple('N') # E: Too few arguments for namedtuple()
+
+[case testNamedTupleWithTooManyArguments]
+from collections import namedtuple
+N = namedtuple('N', ['x'], 'y') # E: Too many arguments for namedtuple()
+
+[case testNamedTupleWithInvalidName]
+from collections import namedtuple
+N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument
+
+[case testNamedTupleWithInvalidItems]
+from collections import namedtuple
+N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple()
+
+[case testNamedTupleWithInvalidItems2]
+from collections import namedtuple
+N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item
+
+[case testNamedTupleWithUnderscoreItemName]
+from collections import namedtuple
+N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback
+
+-- NOTE: The following code works at runtime but is not yet supported by mypy.
+--       Keyword arguments may potentially be supported in the future.
+[case testNamedTupleWithNonpositionalArgs]
+from collections import namedtuple
+N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple()
+
+[case testInvalidNamedTupleBaseClass]
+from typing import NamedTuple
+class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field
+class B(A): pass
+
+[case testInvalidNamedTupleBaseClass2]
+class A(NamedTuple('N', [1])): pass
+class B(A): pass
+[out]
+main:1: error: Name 'NamedTuple' is not defined
+main:1: error: Invalid base class
diff --git a/test-data/unit/semanal-python2.test b/test-data/unit/semanal-python2.test
new file mode 100644
index 0000000..97264a5
--- /dev/null
+++ b/test-data/unit/semanal-python2.test
@@ -0,0 +1,76 @@
+-- Python 2 semantic analysis test cases.
+
+[case testPrintStatement_python2]
+print int, None
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(int [builtins.int])
+    NameExpr(None [builtins.None])
+    Newline))
+
+[case testPrintStatementWithTarget]
+print >>int, None
+[out]
+MypyFile:1(
+  PrintStmt:1(
+    NameExpr(None [builtins.None])
+    Target(
+      NameExpr(int [builtins.int]))
+    Newline))
+
+[case testExecStatement]
+exec None
+exec None in int
+exec None in int, str
+[out]
+MypyFile:1(
+  ExecStmt:1(
+    NameExpr(None [builtins.None]))
+  ExecStmt:2(
+    NameExpr(None [builtins.None])
+    NameExpr(int [builtins.int]))
+  ExecStmt:3(
+    NameExpr(None [builtins.None])
+    NameExpr(int [builtins.int])
+    NameExpr(str [builtins.str])))
+
+[case testVariableLengthTuple_python2]
+from typing import Tuple, cast
+cast(Tuple[int, ...], ())
+[builtins_py2 fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      TupleExpr:2()
+      builtins.tuple[builtins.int])))
+
+[case testTupleArgList_python2]
+def f(x, (y, z)):
+    x = y
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(__tuple_arg_2))
+    Block:1(
+      AssignmentStmt:1(
+        TupleExpr:1(
+          NameExpr(y* [l])
+          NameExpr(z* [l]))
+        NameExpr(__tuple_arg_2 [l]))
+      AssignmentStmt:2(
+        NameExpr(x [l])
+        NameExpr(y [l])))))
+
+[case testBackquoteExpr_python2]
+`object`
+[out]
+MypyFile:1(
+  ExpressionStmt:1(
+    BackquoteExpr:1(
+      NameExpr(object [builtins.object]))))
diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test
new file mode 100644
index 0000000..e104ab7
--- /dev/null
+++ b/test-data/unit/semanal-statements.test
@@ -0,0 +1,929 @@
+[case testReturn]
+def f(x): return x
+def g(): return
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      ReturnStmt:1(
+        NameExpr(x [l]))))
+  FuncDef:2(
+    g
+    Block:2(
+      ReturnStmt:2())))
+
+[case testRaise]
+raise object()
+[out]
+MypyFile:1(
+  RaiseStmt:1(
+    CallExpr:1(
+      NameExpr(object [builtins.object])
+      Args())))
+
+[case testYield]
+def f(): yield f
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Generator
+    Block:1(
+      ExpressionStmt:1(
+        YieldExpr:1(
+          NameExpr(f [__main__.f]))))))
+
+[case testAssert]
+assert object
+[out]
+MypyFile:1(
+  AssertStmt:1(
+    NameExpr(object [builtins.object])))
+
+[case testOperatorAssignment]
+x = y = 1
+x += y
+y |= x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  OperatorAssignmentStmt:2(
+    +
+    NameExpr(x [__main__.x])
+    NameExpr(y [__main__.y]))
+  OperatorAssignmentStmt:3(
+    |
+    NameExpr(y [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testWhile]
+x = y = 1
+while x:
+  y
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  WhileStmt:2(
+    NameExpr(x [__main__.x])
+    Block:2(
+      ExpressionStmt:3(
+        NameExpr(y [__main__.y])))))
+
+[case testFor]
+for x in object:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(object [builtins.object])
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x])))))
+
+[case testForInFunction]
+def f():
+  for x in f:
+    x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        NameExpr(f [__main__.f])
+        Block:2(
+          ExpressionStmt:3(
+            NameExpr(x [l])))))))
+
+[case testMultipleForIndexVars]
+for x, y in []:
+  x, y
+[out]
+MypyFile:1(
+  ForStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    ListExpr:1()
+    Block:1(
+      ExpressionStmt:2(
+        TupleExpr:2(
+          NameExpr(x [__main__.x])
+          NameExpr(y [__main__.y]))))))
+
+[case testForIndexVarScope]
+for x in []:
+  pass
+x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    ListExpr:1()
+    Block:1(
+      PassStmt:2()))
+  ExpressionStmt:3(
+    NameExpr(x [__main__.x])))
+
+[case testForIndexVarScope2]
+def f():
+  for x in []:
+    pass
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        ListExpr:2()
+        Block:2(
+          PassStmt:3()))
+      ExpressionStmt:4(
+        NameExpr(x [l])))))
+
+[case testReusingForLoopIndexVariable]
+for x in None:
+    pass
+for x in None:
+    pass
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    NameExpr(None [builtins.None])
+    Block:1(
+      PassStmt:2()))
+  ForStmt:3(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    Block:3(
+      PassStmt:4())))
+
+[case testReusingForLoopIndexVariable2]
+def f():
+    for x in None:
+        pass
+    for x in None:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      ForStmt:2(
+        NameExpr(x* [l])
+        NameExpr(None [builtins.None])
+        Block:2(
+          PassStmt:3()))
+      ForStmt:4(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        Block:4(
+          PassStmt:5())))))
+
+[case testLoopWithElse]
+for x in []:
+  pass
+else:
+  x
+while 1:
+  pass
+else:
+  x
+[out]
+MypyFile:1(
+  ForStmt:1(
+    NameExpr(x* [__main__.x])
+    ListExpr:1()
+    Block:1(
+      PassStmt:2())
+    Else(
+      ExpressionStmt:4(
+        NameExpr(x [__main__.x]))))
+  WhileStmt:5(
+    IntExpr(1)
+    Block:5(
+      PassStmt:6())
+    Else(
+      ExpressionStmt:8(
+        NameExpr(x [__main__.x])))))
+
+[case testBreak]
+while 1:
+  break
+for x in []:
+  break
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      BreakStmt:2()))
+  ForStmt:3(
+    NameExpr(x* [__main__.x])
+    ListExpr:3()
+    Block:3(
+      BreakStmt:4())))
+
+[case testContinue]
+while 1:
+  continue
+for x in []:
+  continue
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    IntExpr(1)
+    Block:1(
+      ContinueStmt:2()))
+  ForStmt:3(
+    NameExpr(x* [__main__.x])
+    ListExpr:3()
+    Block:3(
+      ContinueStmt:4())))
+
+[case testIf]
+x = 1
+if x:
+  x
+elif x:
+  x
+elif x:
+  x
+else:
+  x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  IfStmt:2(
+    If(
+      NameExpr(x [__main__.x]))
+    Then(
+      ExpressionStmt:3(
+        NameExpr(x [__main__.x])))
+    Else(
+      IfStmt:4(
+        If(
+          NameExpr(x [__main__.x]))
+        Then(
+          ExpressionStmt:5(
+            NameExpr(x [__main__.x])))
+        Else(
+          IfStmt:6(
+            If(
+              NameExpr(x [__main__.x]))
+            Then(
+              ExpressionStmt:7(
+                NameExpr(x [__main__.x])))
+            Else(
+              ExpressionStmt:9(
+                NameExpr(x [__main__.x])))))))))
+
+[case testSimpleIf]
+if object:
+  object
+[out]
+MypyFile:1(
+  IfStmt:1(
+    If(
+      NameExpr(object [builtins.object]))
+    Then(
+      ExpressionStmt:2(
+        NameExpr(object [builtins.object])))))
+
+[case testLvalues]
+x = y = 1
+x = 1
+x.m = 1
+x[y] = 1
+x, y = 1
+[x, y] = 1
+(x, y) = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:3(
+    MemberExpr:3(
+      NameExpr(x [__main__.x])
+      m)
+    IntExpr(1))
+  AssignmentStmt:4(
+    IndexExpr:4(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:5(
+    TupleExpr:5(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:6(
+    ListExpr:6(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:7(
+    TupleExpr:7(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(1)))
+
+[case testStarLvalues]
+*x, y = 1
+*x, (y, *z) = 1
+*(x, q), r = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      StarExpr:1(
+        NameExpr(x* [__main__.x]))
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      StarExpr:2(
+        NameExpr(x [__main__.x]))
+      TupleExpr:2(
+        NameExpr(y [__main__.y])
+        StarExpr:2(
+          NameExpr(z* [__main__.z]))))
+    IntExpr(1))
+  AssignmentStmt:3(
+    TupleExpr:3(
+      StarExpr:3(
+        TupleExpr:3(
+          NameExpr(x [__main__.x])
+          NameExpr(q* [__main__.q])))
+      NameExpr(r* [__main__.r]))
+    IntExpr(1)))
+
+[case testMultipleDefinition]
+x, y = 1
+x, y = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    TupleExpr:1(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))
+    IntExpr(2)))
+
+[case testComplexDefinitions]
+(x) = 1
+([y]) = 2
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    ListExpr:2(
+      NameExpr(y* [__main__.y]))
+    IntExpr(2)))
+
+[case testLocalComplexDefinition]
+def f():
+  (x) = 1
+  x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [l])
+        IntExpr(1))
+      ExpressionStmt:3(
+        NameExpr(x [l])))))
+
+[case testMultipleDefOnlySomeNew]
+x = 1
+y, x = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      NameExpr(x [__main__.x]))
+    IntExpr(1)))
+
+[case testMultipleDefOnlySomeNewNestedTuples]
+x = 1
+y, (x, z) = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      TupleExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(z* [__main__.z])))
+    IntExpr(1)))
+
+[case testMultipleDefOnlySomeNewNestedLists]
+x = 1
+y, [x, z] = 1
+[p, [x, r]] = 1
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(y* [__main__.y])
+      ListExpr:2(
+        NameExpr(x [__main__.x])
+        NameExpr(z* [__main__.z])))
+    IntExpr(1))
+  AssignmentStmt:3(
+    ListExpr:3(
+      NameExpr(p* [__main__.p])
+      ListExpr:3(
+        NameExpr(x [__main__.x])
+        NameExpr(r* [__main__.r])))
+    IntExpr(1)))
+
+[case testIndexedDel]
+x = y = 1
+del x[y]
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    Lvalues(
+      NameExpr(x* [__main__.x])
+      NameExpr(y* [__main__.y]))
+    IntExpr(1))
+  DelStmt:2(
+    IndexExpr:2(
+      NameExpr(x [__main__.x])
+      NameExpr(y [__main__.y]))))
+
+[case testDelGlobalName]
+x = 1
+del x
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(x* [__main__.x])
+    IntExpr(1))
+  DelStmt:2(
+    NameExpr(x [__main__.x])))
+
+[case testDelLocalName]
+def f(x):
+    del x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x))
+    Block:1(
+      DelStmt:2(
+        NameExpr(x [l])))))
+
+[case testDelMultipleThings]
+def f(x, y):
+    del x, y[0]
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    Block:1(
+      DelStmt:2(
+        TupleExpr:2(
+          NameExpr(x [l])
+          IndexExpr:2(
+            NameExpr(y [l])
+            IntExpr(0)))))))
+
+[case testDelMultipleThingsInvalid]
+def f(x, y) -> None:
+    del x, y + 1
+[out]
+main:2: error: can't delete operator
+
+[case testTry]
+class c: pass
+try:
+  c
+except object:
+  c
+except c as e:
+  e
+except:
+  c
+finally:
+  c
+[out]
+MypyFile:1(
+  ClassDef:1(
+    c
+    PassStmt:1())
+  TryStmt:2(
+    Block:2(
+      ExpressionStmt:3(
+        NameExpr(c [__main__.c])))
+    NameExpr(object [builtins.object])
+    Block:4(
+      ExpressionStmt:5(
+        NameExpr(c [__main__.c])))
+    NameExpr(c [__main__.c])
+    NameExpr(e* [__main__.e])
+    Block:6(
+      ExpressionStmt:7(
+        NameExpr(e [__main__.e])))
+    Block:8(
+      ExpressionStmt:9(
+        NameExpr(c [__main__.c])))
+    Finally(
+      ExpressionStmt:11(
+        NameExpr(c [__main__.c])))))
+
+[case testTryElse]
+try:
+  pass
+except:
+  pass
+else:
+  object
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Block:3(
+      PassStmt:4())
+    Else(
+      ExpressionStmt:6(
+        NameExpr(object [builtins.object])))))
+
+[case testTryWithOnlyFinally]
+try:
+  pass
+finally:
+  pass
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    Finally(
+      PassStmt:4())))
+
+[case testExceptWithMultipleTypes]
+class c: pass
+try:
+  pass
+except (c, object) as e:
+  e
+[out]
+MypyFile:1(
+  ClassDef:1(
+    c
+    PassStmt:1())
+  TryStmt:2(
+    Block:2(
+      PassStmt:3())
+    TupleExpr:4(
+      NameExpr(c [__main__.c])
+      NameExpr(object [builtins.object]))
+    NameExpr(e* [__main__.e])
+    Block:4(
+      ExpressionStmt:5(
+        NameExpr(e [__main__.e])))))
+
+[case testRaiseWithoutExpr]
+raise
+[out]
+MypyFile:1(
+  RaiseStmt:1())
+
+[case testWith]
+with object:
+  object
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(object [builtins.object])))))
+
+[case testWithAndVariable]
+with object as x:
+  x
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(x* [__main__.x]))
+    Block:1(
+      ExpressionStmt:2(
+        NameExpr(x [__main__.x])))))
+
+[case testWithInFunction]
+def f():
+  with f as x:
+    x
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Block:1(
+      WithStmt:2(
+        Expr(
+          NameExpr(f [__main__.f]))
+        Target(
+          NameExpr(x* [l]))
+        Block:2(
+          ExpressionStmt:3(
+            NameExpr(x [l])))))))
+
+[case testComplexWith]
+with object, object:
+  pass
+with object as a, object as b:
+  pass
+[out]
+MypyFile:1(
+  WithStmt:1(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Expr(
+      NameExpr(object [builtins.object]))
+    Block:1(
+      PassStmt:2()))
+  WithStmt:3(
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(a* [__main__.a]))
+    Expr(
+      NameExpr(object [builtins.object]))
+    Target(
+      NameExpr(b* [__main__.b]))
+    Block:3(
+      PassStmt:4())))
+
+[case testVariableInBlock]
+while object:
+  x = None
+  x = x
+[out]
+MypyFile:1(
+  WhileStmt:1(
+    NameExpr(object [builtins.object])
+    Block:1(
+      AssignmentStmt:2(
+        NameExpr(x* [__main__.x])
+        NameExpr(None [builtins.None]))
+      AssignmentStmt:3(
+        NameExpr(x [__main__.x])
+        NameExpr(x [__main__.x])))))
+
+[case testVariableInExceptHandler]
+try:
+  pass
+except object as o:
+  x = None
+  o = x
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(object [builtins.object])
+    NameExpr(o* [__main__.o])
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(x* [__main__.x])
+        NameExpr(None [builtins.None]))
+      AssignmentStmt:5(
+        NameExpr(o [__main__.o])
+        NameExpr(x [__main__.x])))))
+
+[case testCallInExceptHandler]
+try:
+  pass
+except object as o:
+  o = object()
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(object [builtins.object])
+    NameExpr(o* [__main__.o])
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(o [__main__.o])
+        CallExpr:4(
+          NameExpr(object [builtins.object])
+          Args())))))
+
+[case testTryExceptWithMultipleHandlers]
+try:
+    pass
+except BaseException as e:
+    pass
+except Err as f:
+    f = BaseException() # Fail
+    f = Err()
+class Err(BaseException): pass
+[builtins fixtures/exception.pyi]
+[out]
+MypyFile:1(
+  TryStmt:1(
+    Block:1(
+      PassStmt:2())
+    NameExpr(BaseException [builtins.BaseException])
+    NameExpr(e* [__main__.e])
+    Block:3(
+      PassStmt:4())
+    NameExpr(Err [__main__.Err])
+    NameExpr(f* [__main__.f])
+    Block:5(
+      AssignmentStmt:6(
+        NameExpr(f [__main__.f])
+        CallExpr:6(
+          NameExpr(BaseException [builtins.BaseException])
+          Args()))
+      AssignmentStmt:7(
+        NameExpr(f [__main__.f])
+        CallExpr:7(
+          NameExpr(Err [__main__.Err])
+          Args()))))
+  ClassDef:8(
+    Err
+    BaseType(
+      builtins.BaseException)
+    PassStmt:8()))
+
+[case testMultipleAssignmentWithPartialNewDef]
+o = None
+x, o = o, o
+[out]
+MypyFile:1(
+  AssignmentStmt:1(
+    NameExpr(o* [__main__.o])
+    NameExpr(None [builtins.None]))
+  AssignmentStmt:2(
+    TupleExpr:2(
+      NameExpr(x* [__main__.x])
+      NameExpr(o [__main__.o]))
+    TupleExpr:2(
+      NameExpr(o [__main__.o])
+      NameExpr(o [__main__.o]))))
+
+[case testFunctionDecorator]
+def decorate(f): pass
+ at decorate
+def g():
+    g()
+[out]
+MypyFile:1(
+  FuncDef:1(
+    decorate
+    Args(
+      Var(f))
+    Block:1(
+      PassStmt:1()))
+  Decorator:2(
+    Var(g)
+    NameExpr(decorate [__main__.decorate])
+    FuncDef:3(
+      g
+      Block:3(
+        ExpressionStmt:4(
+          CallExpr:4(
+            NameExpr(g [__main__.g])
+            Args()))))))
+
+[case testTryWithinFunction]
+def f() -> None:
+    try:
+        pass
+    except object as o:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      TryStmt:2(
+        Block:2(
+          PassStmt:3())
+        NameExpr(object [builtins.object])
+        NameExpr(o* [l])
+        Block:4(
+          PassStmt:5())))))
+
+[case testReuseExceptionVariable]
+def f() -> None:
+    try:
+        pass
+    except object as o:
+        pass
+    except object as o:
+        pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    def ()
+    Block:1(
+      TryStmt:2(
+        Block:2(
+          PassStmt:3())
+        NameExpr(object [builtins.object])
+        NameExpr(o* [l])
+        Block:4(
+          PassStmt:5())
+        NameExpr(object [builtins.object])
+        NameExpr(o [l])
+        Block:6(
+          PassStmt:7())))))
+
+[case testWithMultiple]
+def f(a):
+    pass
+def main():
+    with f(0) as a, f(a) as b:
+        x = a, b
+[out]
+MypyFile:1(
+  FuncDef:1(
+    f
+    Args(
+      Var(a))
+    Block:1(
+      PassStmt:2()))
+  FuncDef:3(
+    main
+    Block:3(
+      WithStmt:4(
+        Expr(
+          CallExpr:4(
+            NameExpr(f [__main__.f])
+            Args(
+              IntExpr(0))))
+        Target(
+          NameExpr(a* [l]))
+        Expr(
+          CallExpr:4(
+            NameExpr(f [__main__.f])
+            Args(
+              NameExpr(a [l]))))
+        Target(
+          NameExpr(b* [l]))
+        Block:4(
+          AssignmentStmt:5(
+            NameExpr(x* [l])
+            TupleExpr:5(
+              NameExpr(a [l])
+              NameExpr(b [l]))))))))
diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test
new file mode 100644
index 0000000..4821635
--- /dev/null
+++ b/test-data/unit/semanal-symtable.test
@@ -0,0 +1,52 @@
+[case testEmptyFile]
+[out]
+-- Note that builtins are ignored to simplify output.
+__main__:
+  SymbolTable()
+
+[case testVarDef]
+x = 1
+[out]
+__main__:
+  SymbolTable(
+    x : Gdef/Var (__main__))
+
+[case testFuncDef]
+def f(): pass
+[out]
+__main__:
+  SymbolTable(
+    f : Gdef/FuncDef (__main__))
+
+[case testEmptyClassDef]
+class c: pass
+[out]
+__main__:
+  SymbolTable(
+    c : Gdef/TypeInfo (__main__))
+
+[case testImport]
+import m
+[file m.py]
+x = 1
+[out]
+__main__:
+  SymbolTable(
+    m : ModuleRef/MypyFile (__main__))
+m:
+  SymbolTable(
+    x : Gdef/Var (m))
+
+[case testImportFromModule]
+from m import x
+[file m.py]
+class x: pass
+y = 1
+[out]
+__main__:
+  SymbolTable(
+    x : Gdef/TypeInfo (__main__))
+m:
+  SymbolTable(
+    x : Gdef/TypeInfo (m)
+    y : Gdef/Var (m))
diff --git a/test-data/unit/semanal-typealiases.test b/test-data/unit/semanal-typealiases.test
new file mode 100644
index 0000000..5178a71
--- /dev/null
+++ b/test-data/unit/semanal-typealiases.test
@@ -0,0 +1,440 @@
+[case testListTypeAlias]
+from typing import List
+def f() -> List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  FuncDef:2(
+    f
+    def () -> builtins.list[builtins.int]
+    Block:2(
+      PassStmt:2())))
+
+[case testDictTypeAlias]
+from typing import Dict
+def f() -> Dict[int, str]: pass
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Dict])
+  FuncDef:2(
+    f
+    def () -> builtins.dict[builtins.int, builtins.str]
+    Block:2(
+      PassStmt:2())))
+
+[case testQualifiedTypeAlias]
+import typing
+def f() -> typing.List[int]: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  FuncDef:2(
+    f
+    def () -> builtins.list[builtins.int]
+    Block:2(
+      PassStmt:2())))
+
+[case testTypeApplicationWithTypeAlias]
+from typing import List
+List[List[int]]
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  ExpressionStmt:2(
+    TypeApplication:2(
+      NameExpr(List [builtins.list])
+      Types(
+        builtins.list[builtins.int]))))
+
+[case testTypeApplicationWithQualifiedTypeAlias]
+import typing
+typing.List[typing.List[int]]
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ExpressionStmt:2(
+    TypeApplication:2(
+      MemberExpr:2(
+        NameExpr(typing)
+        List [builtins.list])
+      Types(
+        builtins.list[builtins.int]))))
+
+[case testSimpleTypeAlias]
+import typing
+class A: pass
+A2 = A
+def f(x: A2) -> A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(A2* [__main__.A2])
+    NameExpr(A [__main__.A]))
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.A) -> __main__.A
+    Block:4(
+      PassStmt:4())))
+
+[case testQualifiedSimpleTypeAlias]
+import typing
+import _m
+A2 = _m.A
+x = 1 # type: A2
+[file _m.py]
+import typing
+class A: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  AssignmentStmt:3(
+    NameExpr(A2* [__main__.A2])
+    MemberExpr:3(
+      NameExpr(_m)
+      A [_m.A]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    _m.A))
+
+[case testUnionTypeAlias]
+from typing import Union
+U = Union[int, str]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testUnionTypeAlias2]
+from typing import Union
+class A: pass
+U = Union[int, A]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, __main__.A]))
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, __main__.A])
+    Block:4(
+      PassStmt:4())))
+
+[case testUnionTypeAliasWithQualifiedUnion]
+import typing
+U = typing.Union[int, str]
+def f(x: U) -> None: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testTupleTypeAlias]
+from typing import Tuple
+T = Tuple[int, str]
+def f(x: T) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeAliasExpr(Tuple[builtins.int, builtins.str]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Tuple[builtins.int, builtins.str])
+    Block:3(
+      PassStmt:3())))
+
+[case testCallableTypeAlias]
+from typing import Callable
+C = Callable[[int], None]
+def f(x: C) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable])
+  AssignmentStmt:2(
+    NameExpr(C* [__main__.C])
+    TypeAliasExpr(def (builtins.int)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: def (builtins.int))
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericTypeAlias]
+from typing import Generic, TypeVar
+T = TypeVar('T')
+class G(Generic[T]): pass
+A = G[int]
+def f(x: A) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic, TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  ClassDef:3(
+    G
+    TypeVars(
+      T)
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(__main__.G[builtins.int]))
+  FuncDef:5(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.G[builtins.int])
+    Block:5(
+      PassStmt:5())))
+
+[case testGenericTypeAlias2]
+from typing import List
+A = List[int]
+def f(x: A) -> None: pass
+[builtins fixtures/list.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [List])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(builtins.list[builtins.int]))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.list[builtins.int])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportUnionTypeAlias]
+import typing
+from _m import U
+def f(x: U) -> None: pass
+[file _m.py]
+from typing import Union
+class A: pass
+U = Union[int, A]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(_m, [U])
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, _m.A])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportUnionTypeAlias2]
+import typing
+import _m
+def f(x: _m.U) -> None: pass
+[file _m.py]
+from typing import Union
+class A: pass
+U = Union[int, A]
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, _m.A])
+    Block:3(
+      PassStmt:3())))
+
+[case testImportSimpleTypeAlias]
+import typing
+from _m import A
+def f(x: A) -> None: pass
+[file _m.py]
+import typing
+A = int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ImportFrom:2(_m, [A])
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:3(
+      PassStmt:3())))
+
+[case testImportSimpleTypeAlias2]
+import typing
+import _m
+def f(x: _m.A) -> None: pass
+[file _m.py]
+import typing
+A = int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:3(
+      PassStmt:3())))
+
+[case testAnyTypeAlias]
+from typing import Any
+A = Any
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    NameExpr(Any [typing.Any]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Any))
+
+[case testAnyTypeAlias2]
+import typing
+A = typing.Any
+a = 1 # type: A
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    MemberExpr:2(
+      NameExpr(typing)
+      Any [typing.Any]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Any))
+
+[case testTypeAliasAlias]
+from typing import Union
+U = Union[int, str]
+U2 = U
+x = 1 # type: U2
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(U* [__main__.U])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  AssignmentStmt:3(
+    NameExpr(U2* [__main__.U2])
+    NameExpr(U [__main__.U]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testTypeAliasOfImportedAlias]
+from typing import Union
+from _m import U
+U2 = U
+x = 1 # type: U2
+[file _m.py]
+from typing import Union
+U = Union[int, str]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  ImportFrom:2(_m, [U])
+  AssignmentStmt:3(
+    NameExpr(U2* [__main__.U2])
+    NameExpr(U [_m.U]))
+  AssignmentStmt:4(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testListTypeDoesNotGenerateAlias]
+import typing
+A = [int, str]
+a = 1 # type: A  # E: Invalid type "__main__.A"
+
+[case testCantUseStringLiteralAsTypeAlias]
+from typing import Union
+A = 'Union[int, str]'
+a = 1 # type: A  # E: Invalid type "__main__.A"
+
+[case testStringLiteralTypeAsAliasComponent]
+from typing import Union
+A = Union['int', str]
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(Union[builtins.int, builtins.str]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Union[builtins.int, builtins.str]))
+
+[case testComplexTypeAlias]
+from typing import Union, Tuple, Any
+A = Union['int', Tuple[int, Any]]
+a = 1 # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union, Tuple, Any])
+  AssignmentStmt:2(
+    NameExpr(A* [__main__.A])
+    TypeAliasExpr(Union[builtins.int, Tuple[builtins.int, Any]]))
+  AssignmentStmt:3(
+    NameExpr(a [__main__.a])
+    IntExpr(1)
+    Union[builtins.int, Tuple[builtins.int, Any]]))
diff --git a/test-data/unit/semanal-typeddict.test b/test-data/unit/semanal-typeddict.test
new file mode 100644
index 0000000..a0229d8
--- /dev/null
+++ b/test-data/unit/semanal-typeddict.test
@@ -0,0 +1,81 @@
+-- Create Type
+
+-- TODO: Implement support for this syntax.
+--[case testCanCreateTypedDictTypeWithKeywordArguments]
+--from mypy_extensions import TypedDict
+--Point = TypedDict('Point', x=int, y=int)
+--[builtins fixtures/dict.pyi]
+--[out]
+--MypyFile:1(
+--  ImportFrom:1(mypy_extensions, [TypedDict])
+--  AssignmentStmt:2(
+--    NameExpr(Point* [__main__.Point])
+--    TypedDictExpr:2(Point)))
+
+-- TODO: Implement support for this syntax.
+--[case testCanCreateTypedDictTypeWithDictCall]
+--from mypy_extensions import TypedDict
+--Point = TypedDict('Point', dict(x=int, y=int))
+--[builtins fixtures/dict.pyi]
+--[out]
+--MypyFile:1(
+--  ImportFrom:1(mypy_extensions, [TypedDict])
+--  AssignmentStmt:2(
+--    NameExpr(Point* [__main__.Point])
+--    TypedDictExpr:2(Point)))
+
+[case testCanCreateTypedDictTypeWithDictLiteral]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int})
+[builtins fixtures/dict.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(mypy_extensions, [TypedDict])
+  AssignmentStmt:2(
+    NameExpr(Point* [__main__.Point])
+    TypedDictExpr:2(Point)))
+
+
+-- Create Type (Errors)
+
+[case testCannotCreateTypedDictTypeWithTooFewArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point')  # E: Too few arguments for TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithTooManyArguments]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int}, dict)  # E: Too many arguments for TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidName]
+from mypy_extensions import TypedDict
+Point = TypedDict(dict, {'x': int, 'y': int})  # E: TypedDict() expects a string literal as the first argument
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItems]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x'})  # E: TypedDict() expects a dictionary literal as the second argument
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithUnderscoreItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object})  # E: TypedDict() item names cannot start with an underscore: _fallback
+[builtins fixtures/dict.pyi]
+
+-- NOTE: The following code works at runtime but is not yet supported by mypy.
+--       Keyword arguments may potentially be supported in the future.
+[case testCannotCreateTypedDictTypeWithNonpositionalArgs]
+from mypy_extensions import TypedDict
+Point = TypedDict(typename='Point', fields={'x': int, 'y': int})  # E: Unexpected arguments to TypedDict()
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItemName]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {int: int, int: int})  # E: Invalid TypedDict() field name
+[builtins fixtures/dict.pyi]
+
+[case testCannotCreateTypedDictTypeWithInvalidItemType]
+from mypy_extensions import TypedDict
+Point = TypedDict('Point', {'x': 1, 'y': 1})  # E: Invalid field type
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/semanal-typeinfo.test b/test-data/unit/semanal-typeinfo.test
new file mode 100644
index 0000000..6bb62e1
--- /dev/null
+++ b/test-data/unit/semanal-typeinfo.test
@@ -0,0 +1,80 @@
+[case testEmptyFile]
+[out]
+TypeInfoMap()
+
+[case testEmptyClass]
+class c: pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Names()))
+
+[case testClassWithMethod]
+class c:
+  def f(self): pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Names(
+      f)))
+
+[case testClassWithAttributes]
+class c:
+  def __init__(self, x):
+    self.y = x
+    self.z = 1
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(builtins.object)
+    Names(
+      __init__
+      y
+      z)))
+
+[case testBaseClass]
+class base: pass
+class c(base): pass
+[out]
+TypeInfoMap(
+  __main__.base : TypeInfo(
+    Name(__main__.base)
+    Bases(builtins.object)
+    Names())
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(__main__.base)
+    Names()))
+
+[case testClassAndAbstractClass]
+from abc import abstractmethod, ABCMeta
+import typing
+
+class i(metaclass=ABCMeta): pass
+class c(i): pass
+[out]
+TypeInfoMap(
+  __main__.c : TypeInfo(
+    Name(__main__.c)
+    Bases(__main__.i)
+    Names())
+  __main__.i : TypeInfo(
+    Name(__main__.i)
+    Bases(builtins.object)
+    Names()))
+
+[case testAttributeWithoutType]
+class A:
+    a = A
+[out]
+TypeInfoMap(
+  __main__.A : TypeInfo(
+    Name(__main__.A)
+    Bases(builtins.object)
+    Names(
+      a)))
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
new file mode 100644
index 0000000..beb3cbf
--- /dev/null
+++ b/test-data/unit/semanal-types.test
@@ -0,0 +1,1465 @@
+[case testVarWithType]
+import typing
+class A: pass
+x = A() # type: A
+y = x
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    PassStmt:2())
+  AssignmentStmt:3(
+    NameExpr(x [__main__.x])
+    CallExpr:3(
+      NameExpr(A [__main__.A])
+      Args())
+    __main__.A)
+  AssignmentStmt:4(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testLocalVarWithType]
+class A: pass
+def f():
+  x = None # type: A
+  y = x
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        NameExpr(None [builtins.None])
+        __main__.A)
+      AssignmentStmt:4(
+        NameExpr(y* [l])
+        NameExpr(x [l])))))
+
+[case testAnyType]
+from typing import Any
+x = None # type: Any
+y = x
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    Any)
+  AssignmentStmt:3(
+    NameExpr(y* [__main__.y])
+    NameExpr(x [__main__.x])))
+
+[case testMemberVarWithType]
+import typing
+class A:
+  def __init__(self):
+    self.x = None # type: int
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    FuncDef:3(
+      __init__
+      Args(
+        Var(self))
+      Block:3(
+        AssignmentStmt:4(
+          MemberExpr:4(
+            NameExpr(self [l])
+            x)
+          NameExpr(None [builtins.None])
+          builtins.int)))))
+
+[case testClassVarWithType]
+import typing
+class A:
+  x = None # type: int
+  x = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(x [m])
+      NameExpr(None [builtins.None])
+      builtins.int)
+    AssignmentStmt:4(
+      NameExpr(x [m])
+      IntExpr(1))))
+
+[case testFunctionSig]
+from typing import Any
+class A: pass
+def f(x: A) -> A: pass
+def g(x: Any, y: A) -> None:
+  z = x, y
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  ClassDef:2(
+    A
+    PassStmt:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def (x: __main__.A) -> __main__.A
+    Block:3(
+      PassStmt:3()))
+  FuncDef:4(
+    g
+    Args(
+      Var(x)
+      Var(y))
+    def (x: Any, y: __main__.A)
+    Block:4(
+      AssignmentStmt:5(
+        NameExpr(z* [l])
+        TupleExpr:5(
+          NameExpr(x [l])
+          NameExpr(y [l]))))))
+
+[case testBaseclass]
+class A: pass
+class B(A): pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    PassStmt:1())
+  ClassDef:2(
+    B
+    BaseType(
+      __main__.A)
+    PassStmt:2()))
+
+[case testMultipleVarDef]
+
+class A: pass
+class B: pass
+a, b = None, None # type: (A, B)
+x = a, b
+[out]
+MypyFile:1(
+  ClassDef:2(
+    A
+    PassStmt:2())
+  ClassDef:3(
+    B
+    PassStmt:3())
+  AssignmentStmt:4(
+    TupleExpr:4(
+      NameExpr(a [__main__.a])
+      NameExpr(b [__main__.b]))
+    TupleExpr:4(
+      NameExpr(None [builtins.None])
+      NameExpr(None [builtins.None]))
+    Tuple[__main__.A, __main__.B])
+  AssignmentStmt:5(
+    NameExpr(x* [__main__.x])
+    TupleExpr:5(
+      NameExpr(a [__main__.a])
+      NameExpr(b [__main__.b]))))
+
+[case testGenericType]
+from typing import TypeVar, Generic, Any
+
+t = TypeVar('t')
+
+class A(Generic[t]): pass
+class B: pass
+x = None # type: A[B]
+y = None # type: A[Any]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  ClassDef:5(
+    A
+    TypeVars(
+      t)
+    PassStmt:5())
+  ClassDef:6(
+    B
+    PassStmt:6())
+  AssignmentStmt:7(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[__main__.B])
+  AssignmentStmt:8(
+    NameExpr(y [__main__.y])
+    NameExpr(None [builtins.None])
+    __main__.A[Any]))
+
+[case testGenericType2]
+from typing import TypeVar, Generic, Any
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+class B: pass
+x = None # type: A[B, Any]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  ClassDef:5(
+    B
+    PassStmt:5())
+  AssignmentStmt:6(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[__main__.B, Any]))
+
+[case testAssignmentAfterDef]
+
+
+class A: pass
+a = None # type: A
+a = 1
+def f():
+  b = None # type: A
+  b = 1
+[out]
+MypyFile:1(
+  ClassDef:3(
+    A
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(a [__main__.a])
+    NameExpr(None [builtins.None])
+    __main__.A)
+  AssignmentStmt:5(
+    NameExpr(a [__main__.a])
+    IntExpr(1))
+  FuncDef:6(
+    f
+    Block:6(
+      AssignmentStmt:7(
+        NameExpr(b [l])
+        NameExpr(None [builtins.None])
+        __main__.A)
+      AssignmentStmt:8(
+        NameExpr(b [l])
+        IntExpr(1)))))
+
+[case testCast]
+from typing import TypeVar, Generic, Any, cast
+t = TypeVar('t')
+class c: pass
+class d(Generic[t]): pass
+cast(Any, 1)
+cast(c, 1)
+cast(d[c], c)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any, cast])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    c
+    PassStmt:3())
+  ClassDef:4(
+    d
+    TypeVars(
+      t)
+    PassStmt:4())
+  ExpressionStmt:5(
+    CastExpr:5(
+      IntExpr(1)
+      Any))
+  ExpressionStmt:6(
+    CastExpr:6(
+      IntExpr(1)
+      __main__.c))
+  ExpressionStmt:7(
+    CastExpr:7(
+      NameExpr(c [__main__.c])
+      __main__.d[__main__.c])))
+
+[case testCastToQualifiedTypeAndCast]
+import typing
+import _m
+typing.cast(_m.C, object)
+[file _m.py]
+class C: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m)
+  ExpressionStmt:3(
+    CastExpr:3(
+      NameExpr(object [builtins.object])
+      _m.C)))
+
+[case testLongQualifiedCast]
+import typing
+import _m._n
+typing.cast(_m._n.C, object)
+[file _m/__init__.py]
+[file _m/_n.py]
+class C: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  Import:2(_m._n)
+  ExpressionStmt:3(
+    CastExpr:3(
+      NameExpr(object [builtins.object])
+      _m._n.C)))
+
+[case testCastTargetWithTwoTypeArgs]
+from typing import TypeVar, Generic, cast
+t = TypeVar('t')
+s = TypeVar('s')
+class C(Generic[t, s]): pass
+cast(C[str, int], C)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, cast])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    C
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  ExpressionStmt:5(
+    CastExpr:5(
+      NameExpr(C [__main__.C])
+      __main__.C[builtins.str, builtins.int])))
+
+[case testCastToTupleType]
+from typing import Tuple, cast
+cast(Tuple[int, str], None)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      NameExpr(None [builtins.None])
+      Tuple[builtins.int, builtins.str])))
+
+[case testCastToFunctionType]
+from typing import Callable, cast
+cast(Callable[[int], str], None)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable, cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      NameExpr(None [builtins.None])
+      def (builtins.int) -> builtins.str)))
+
+[case testCastToStringLiteralType]
+from typing import cast
+cast('int', 1)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [cast])
+  ExpressionStmt:2(
+    CastExpr:2(
+      IntExpr(1)
+      builtins.int)))
+
+[case testFunctionTypeVariable]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> None:
+  y = None # type: t
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [t] (x: t`-1)
+    Block:3(
+      AssignmentStmt:4(
+        NameExpr(y [l])
+        NameExpr(None [builtins.None])
+        t`-1))))
+
+[case testTwoFunctionTypeVariables]
+from typing import TypeVar
+t = TypeVar('t')
+u = TypeVar('u')
+def f(x: t, y: u, z: t) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(u* [__main__.u])
+    TypeVarExpr:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x)
+      Var(y)
+      Var(z))
+    def [t, u] (x: t`-1, y: u`-2, z: t`-1)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: A[t], y) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x)
+      Var(y))
+    def [t] (x: __main__.A[t`-1], y: Any)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable2]
+from typing import TypeVar, Tuple, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Tuple[int, t]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Tuple, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: Tuple[builtins.int, t`-1])
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable3]
+from typing import TypeVar, Callable, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Callable[[int, t], int]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Callable, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: def (builtins.int, t`-1) -> builtins.int)
+    Block:4(
+      PassStmt:4())))
+
+[case testNestedGenericFunctionTypeVariable4]
+from typing import TypeVar, Callable, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+def f(x: Callable[[], t]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Callable, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  FuncDef:4(
+    f
+    Args(
+      Var(x))
+    def [t] (x: def () -> t`-1)
+    Block:4(
+      PassStmt:4())))
+
+[case testGenericFunctionTypeVariableInReturnType]
+from typing import TypeVar
+t = TypeVar('t')
+def f() -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    def [t] () -> t`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testSelfType]
+class A:
+  def f(self, o: object) -> None: pass
+[out]
+MypyFile:1(
+  ClassDef:1(
+    A
+    FuncDef:2(
+      f
+      Args(
+        Var(self)
+        Var(o))
+      def (self: __main__.A, o: builtins.object)
+      Block:2(
+        PassStmt:2()))))
+
+[case testNestedGenericFunction]
+from typing import TypeVar
+t = TypeVar('t')
+def f() -> None:
+    def g() -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    def ()
+    Block:3(
+      FuncDef:4(
+        g
+        def [t] () -> t`-1
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassTvar]
+from typing import TypeVar, Generic
+
+t = TypeVar('t')
+
+class c(Generic[t]):
+  def f(self) -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  ClassDef:5(
+    c
+    TypeVars(
+      t)
+    FuncDef:6(
+      f
+      Args(
+        Var(self))
+      def (self: __main__.c[t`1]) -> t`1
+      Block:6(
+        PassStmt:6()))))
+
+[case testClassTvar2]
+from typing import TypeVar, Generic
+
+t = TypeVar('t')
+s = TypeVar('s')
+
+class c(Generic[t, s]):
+  def f(self, x: s) -> t: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:3(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:3())
+  AssignmentStmt:4(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:4())
+  ClassDef:6(
+    c
+    TypeVars(
+      t
+      s)
+    FuncDef:7(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.c[t`1, s`2], x: s`2) -> t`1
+      Block:7(
+        PassStmt:7()))))
+
+[case testGenericBaseClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class d(Generic[t]): pass
+class c(d[t], Generic[t]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    d
+    TypeVars(
+      t)
+    PassStmt:3())
+  ClassDef:4(
+    c
+    TypeVars(
+      t)
+    BaseType(
+      __main__.d[t`1])
+    PassStmt:4()))
+
+[case testTupleType]
+from typing import Tuple
+t = None # type: tuple
+t1 = None # type: Tuple[object]
+t2 = None # type: Tuple[int, object]
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(t [__main__.t])
+    NameExpr(None [builtins.None])
+    builtins.tuple[Any])
+  AssignmentStmt:3(
+    NameExpr(t1 [__main__.t1])
+    NameExpr(None [builtins.None])
+    Tuple[builtins.object])
+  AssignmentStmt:4(
+    NameExpr(t2 [__main__.t2])
+    NameExpr(None [builtins.None])
+    Tuple[builtins.int, builtins.object]))
+
+[case testVariableLengthTuple]
+from typing import Tuple
+t = None # type: Tuple[int, ...]
+[builtins fixtures/tuple.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Tuple])
+  AssignmentStmt:2(
+    NameExpr(t [__main__.t])
+    NameExpr(None [builtins.None])
+    builtins.tuple[builtins.int]))
+
+[case testInvalidTupleType]
+from typing import Tuple
+t = None # type: Tuple[int, str, ...] # E: Unexpected '...'
+[out]
+
+[case testFunctionTypes]
+from typing import Callable
+f = None # type: Callable[[object, int], str]
+g = None # type: Callable[[], None]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Callable])
+  AssignmentStmt:2(
+    NameExpr(f [__main__.f])
+    NameExpr(None [builtins.None])
+    def (builtins.object, builtins.int) -> builtins.str)
+  AssignmentStmt:3(
+    NameExpr(g [__main__.g])
+    NameExpr(None [builtins.None])
+    def ()))
+
+[case testOverloadedFunction]
+from typing import overload
+@overload
+def f(o: object) -> int: o
+@overload
+def f(a: str) -> object: a
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  OverloadedFuncDef:2(
+    Overload(def (o: builtins.object) -> builtins.int, \
+             def (a: builtins.str) -> builtins.object)
+    Decorator:2(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:3(
+        f
+        Args(
+          Var(o))
+        def (o: builtins.object) -> builtins.int
+        Block:3(
+          ExpressionStmt:3(
+            NameExpr(o [l])))))
+    Decorator:4(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:5(
+        f
+        Args(
+          Var(a))
+        def (a: builtins.str) -> builtins.object
+        Block:5(
+          ExpressionStmt:5(
+            NameExpr(a [l])))))))
+
+[case testReferenceToOverloadedFunction]
+from typing import overload
+@overload
+def f() -> None: pass
+@overload
+def f(x: int) -> None: pass
+x = f
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  OverloadedFuncDef:2(
+    Overload(def (), def (x: builtins.int))
+    Decorator:2(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:3(
+        f
+        def ()
+        Block:3(
+          PassStmt:3())))
+    Decorator:4(
+      Var(f)
+      NameExpr(overload [typing.overload])
+      FuncDef:5(
+        f
+        Args(
+          Var(x))
+        def (x: builtins.int)
+        Block:5(
+          PassStmt:5()))))
+  AssignmentStmt:6(
+    NameExpr(x* [__main__.x])
+    NameExpr(f [__main__.f])))
+
+[case testNestedOverloadedFunction]
+from typing import overload
+def f():
+    @overload
+    def g(): pass
+    @overload
+    def g(x): pass
+    y = g
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [overload])
+  FuncDef:2(
+    f
+    Block:2(
+      OverloadedFuncDef:3(
+        Overload(def () -> Any, def (x: Any) -> Any)
+        Decorator:3(
+          Var(g)
+          NameExpr(overload [typing.overload])
+          FuncDef:4(
+            g
+            Block:4(
+              PassStmt:4())))
+        Decorator:5(
+          Var(g)
+          NameExpr(overload [typing.overload])
+          FuncDef:6(
+            g
+            Args(
+              Var(x))
+            Block:6(
+              PassStmt:6()))))
+      AssignmentStmt:7(
+        NameExpr(y* [l])
+        NameExpr(g [l])))))
+
+[case testImplicitGenericTypeArgs]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+x = None # type: A
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  AssignmentStmt:5(
+    NameExpr(x [__main__.x])
+    NameExpr(None [builtins.None])
+    __main__.A[Any, Any]))
+
+[case testImplicitTypeArgsAndGenericBaseClass]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class B(Generic[s]): pass
+class A(B, Generic[t]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    B
+    TypeVars(
+      s)
+    PassStmt:4())
+  ClassDef:5(
+    A
+    TypeVars(
+      t)
+    BaseType(
+      __main__.B[Any])
+    PassStmt:5()))
+
+[case testTypeApplication]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+x = A[int]()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  AssignmentStmt:4(
+    NameExpr(x* [__main__.x])
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int))
+      Args())))
+
+[case testTypeApplicationWithTwoTypeArgs]
+from typing import TypeVar, Generic, Any
+t = TypeVar('t')
+s = TypeVar('s')
+class A(Generic[t, s]): pass
+x = A[int, Any]()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic, Any])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  AssignmentStmt:3(
+    NameExpr(s* [__main__.s])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      t
+      s)
+    PassStmt:4())
+  AssignmentStmt:5(
+    NameExpr(x* [__main__.x])
+    CallExpr:5(
+      TypeApplication:5(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int
+          Any))
+      Args())))
+
+[case testFunctionTypeApplication]
+from typing import TypeVar
+t = TypeVar('t')
+def f(x: t) -> None: pass
+f[int](1)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [t] (x: t`-1)
+    Block:3(
+      PassStmt:3()))
+  ExpressionStmt:4(
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(f [__main__.f])
+        Types(
+          builtins.int))
+      Args(
+        IntExpr(1)))))
+
+[case testTypeApplicationWithStringLiteralType]
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class A(Generic[t]): pass
+A['int']()
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(t* [__main__.t])
+    TypeVarExpr:2())
+  ClassDef:3(
+    A
+    TypeVars(
+      t)
+    PassStmt:3())
+  ExpressionStmt:4(
+    CallExpr:4(
+      TypeApplication:4(
+        NameExpr(A [__main__.A])
+        Types(
+          builtins.int))
+      Args())))
+
+[case testVarArgsAndKeywordArgs]
+def g(*x: int, y: str = ''): pass
+[out]
+MypyFile:1(
+  FuncDef:1(
+    g
+    MaxPos(0)
+    Args(
+      Var(y))
+    def (*x: builtins.int, *, y: builtins.str =) -> Any
+    Init(
+      AssignmentStmt:1(
+        NameExpr(y [l])
+        StrExpr()))
+    VarArg(
+      Var(x))
+    Block:1(
+      PassStmt:1())))
+
+[case testQualifiedGeneric]
+from typing import TypeVar
+import typing
+T = TypeVar('T')
+class A(typing.Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  Import:2(typing)
+  AssignmentStmt:3(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:3())
+  ClassDef:4(
+    A
+    TypeVars(
+      T)
+    PassStmt:4()))
+
+[case testQualifiedTypevar]
+import typing
+T = typing.TypeVar('T')
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  Import:1(typing)
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testAliasedTypevar]
+from typing import TypeVar as tv
+T = tv('T')
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar : tv])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2())
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testLocalTypevar]
+from typing import TypeVar
+def f():
+    T = TypeVar('T')
+    def g(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  FuncDef:2(
+    f
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(T* [l])
+        TypeVarExpr:3())
+      FuncDef:4(
+        g
+        Args(
+          Var(x))
+        def [T] (x: T`-1) -> T`-1
+        Block:4(
+          PassStmt:4())))))
+
+[case testClassLevelTypevar]
+from typing import TypeVar
+class A:
+    T = TypeVar('T')
+    def g(self, x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  ClassDef:2(
+    A
+    AssignmentStmt:3(
+      NameExpr(T* [m])
+      TypeVarExpr:3())
+    FuncDef:4(
+      g
+      Args(
+        Var(self)
+        Var(x))
+      def [T] (self: __main__.A, x: T`-1) -> T`-1
+      Block:4(
+        PassStmt:4()))))
+
+[case testImportTypevar]
+from typing import Generic
+from _m import T
+class A(Generic[T]):
+    y = None # type: T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic])
+  ImportFrom:2(_m, [T])
+  ClassDef:3(
+    A
+    TypeVars(
+      T)
+    AssignmentStmt:4(
+      NameExpr(y [m])
+      NameExpr(None [builtins.None])
+      T`1)))
+
+[case testQualifiedReferenceToTypevarInClass]
+from typing import Generic
+import _m
+class A(Generic[_m.T]):
+    a = None # type: _m.T
+    def f(self, x: _m.T):
+        b = None # type: _m.T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Generic])
+  Import:2(_m)
+  ClassDef:3(
+    A
+    TypeVars(
+      _m.T)
+    AssignmentStmt:4(
+      NameExpr(a [m])
+      NameExpr(None [builtins.None])
+      _m.T`1)
+    FuncDef:5(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any
+      Block:5(
+        AssignmentStmt:6(
+          NameExpr(b [l])
+          NameExpr(None [builtins.None])
+          _m.T`1)))))
+
+[case testQualifiedReferenceToTypevarInFunctionSignature]
+import _m
+def f(x: _m.T) -> None:
+    a = None # type: _m.T
+[file _m.py]
+from typing import TypeVar
+T = TypeVar('T')
+[out]
+MypyFile:1(
+  Import:1(_m)
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def [_m.T] (x: _m.T`-1)
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(a [l])
+        NameExpr(None [builtins.None])
+        _m.T`-1))))
+
+[case testFunctionCommentAnnotation]
+from typing import Any
+def f(x): # type: (int) -> Any
+  x = 1
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int) -> Any
+    Block:2(
+      AssignmentStmt:3(
+        NameExpr(x [l])
+        IntExpr(1)))))
+
+[case testMethodCommentAnnotation]
+import typing
+class A:
+  def f(self, x): # type: (int) -> str
+    x = 1
+[out]
+MypyFile:1(
+  Import:1(typing)
+  ClassDef:2(
+    A
+    FuncDef:3(
+      f
+      Args(
+        Var(self)
+        Var(x))
+      def (self: __main__.A, x: builtins.int) -> builtins.str
+      Block:3(
+        AssignmentStmt:4(
+          NameExpr(x [l])
+          IntExpr(1))))))
+
+[case testTypevarWithValues]
+from typing import TypeVar, Any
+T = TypeVar('T', int, str)
+S = TypeVar('S', Any, int, str)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Any])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  AssignmentStmt:3(
+    NameExpr(S* [__main__.S])
+    TypeVarExpr:3(
+      Values(
+        Any
+        builtins.int
+        builtins.str))))
+
+[case testTypevarWithValuesAndVariance]
+from typing import TypeVar
+T = TypeVar('T', int, str, covariant=True)
+[builtins fixtures/bool.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Variance(COVARIANT)
+      Values(
+        builtins.int
+        builtins.str))))
+
+[case testTypevarWithBound]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int))))
+
+[case testGenericFunctionWithValueSet]
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T in (builtins.int, builtins.str)] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericClassWithValueSet]
+from typing import TypeVar, Generic
+T = TypeVar('T', int, str)
+class C(Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Values(
+        builtins.int
+        builtins.str)))
+  ClassDef:3(
+    C
+    TypeVars(
+      T in (builtins.int, builtins.str))
+    PassStmt:3()))
+
+[case testGenericFunctionWithBound]
+from typing import TypeVar
+T = TypeVar('T', bound=int)
+def f(x: T) -> T: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int)))
+  FuncDef:3(
+    f
+    Args(
+      Var(x))
+    def [T <: builtins.int] (x: T`-1) -> T`-1
+    Block:3(
+      PassStmt:3())))
+
+[case testGenericClassWithBound]
+from typing import TypeVar, Generic
+T = TypeVar('T', bound=int)
+class C(Generic[T]): pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar, Generic])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      UpperBound(builtins.int)))
+  ClassDef:3(
+    C
+    TypeVars(
+      T <: builtins.int)
+    PassStmt:3()))
+
+[case testSimpleDucktypeDecorator]
+from typing import _promote
+@_promote(str)
+class S: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [_promote])
+  ClassDef:2(
+    S
+    Promote(builtins.str)
+    Decorators(
+      PromoteExpr:2(builtins.str))
+    PassStmt:3()))
+
+[case testUnionType]
+from typing import Union
+def f(x: Union[int, str]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithNoneItem]
+from typing import Union
+def f(x: Union[int, None]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithNoneItemAndTwoItems]
+from typing import Union
+def f(x: Union[int, None, str]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: Union[builtins.int, builtins.str])
+    Block:2(
+      PassStmt:2())))
+
+[case testUnionTypeWithSingleItem]
+from typing import Union
+def f(x: Union[int]) -> None: pass
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Union])
+  FuncDef:2(
+    f
+    Args(
+      Var(x))
+    def (x: builtins.int)
+    Block:2(
+      PassStmt:2())))
+
+[case testOptionalTypes]
+from typing import Optional
+x = 1  # type: Optional[int]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Optional])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(1)
+    builtins.int))
+
+[case testInvalidOptionalType]
+from typing import Optional
+x = 1  # type: Optional[int, str]  # E: Optional[...] must have exactly one type argument
+y = 1  # type: Optional  # E: Optional[...] must have exactly one type argument
+[out]
+
+[case testCoAndContravariantTypeVar]
+from typing import TypeVar
+T = TypeVar('T', covariant=True)
+S = TypeVar('S', contravariant=True)
+[builtins fixtures/bool.pyi]
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [TypeVar])
+  AssignmentStmt:2(
+    NameExpr(T* [__main__.T])
+    TypeVarExpr:2(
+      Variance(COVARIANT)))
+  AssignmentStmt:3(
+    NameExpr(S* [__main__.S])
+    TypeVarExpr:3(
+      Variance(CONTRAVARIANT))))
+
+[case testTupleExpressionAsType]
+def f(x: (int, int)) -> None: pass
+[out]
+main:1: error: Invalid tuple literal type
+
+[case testQualifiedTypeNameBasedOnAny]
+from typing import Any
+x = 0 # type: Any
+z = 0 # type: x.y
+[out]
+MypyFile:1(
+  ImportFrom:1(typing, [Any])
+  AssignmentStmt:2(
+    NameExpr(x [__main__.x])
+    IntExpr(0)
+    Any)
+  AssignmentStmt:3(
+    NameExpr(z [__main__.z])
+    IntExpr(0)
+    Any))
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
new file mode 100644
index 0000000..baa5be5
--- /dev/null
+++ b/test-data/unit/stubgen.test
@@ -0,0 +1,565 @@
+[case testEmptyFile]
+[out]
+
+[case testSingleFunction]
+def f():
+    x = 1
+[out]
+def f(): ...
+
+[case testTwoFunctions]
+def f(a, b):
+    x = 1
+def g(arg):
+    pass
+[out]
+def f(a, b): ...
+def g(arg): ...
+
+[case testDefaultArgInt]
+def f(a, b=2): ...
+def g(b=-1, c=0): ...
+[out]
+def f(a, b: int = ...): ...
+def g(b: int = ..., c: int = ...): ...
+
+[case testDefaultArgNone]
+def f(x=None): ...
+[out]
+from typing import Any, Optional
+
+def f(x: Optional[Any] = ...): ...
+
+[case testDefaultArgBool]
+def f(x=True, y=False): ...
+[out]
+def f(x: bool = ..., y: bool = ...): ...
+
+[case testDefaultArgStr]
+def f(x='foo'): ...
+[out]
+def f(x: str = ...): ...
+
+[case testDefaultArgBytes]
+def f(x=b'foo'): ...
+[out]
+def f(x: bytes = ...): ...
+
+[case testDefaultArgFloat]
+def f(x=1.2): ...
+[out]
+def f(x: float = ...): ...
+
+[case testDefaultArgOther]
+def f(x=ord): ...
+[out]
+from typing import Any
+
+def f(x: Any = ...): ...
+
+[case testVarArgs]
+def f(x, *y): ...
+[out]
+def f(x, *y): ...
+
+[case testKwVarArgs]
+def f(x, **y): ...
+[out]
+def f(x, **y): ...
+
+[case testClass]
+class A:
+    def f(self, x):
+        x = 1
+def g(): ...
+[out]
+class A:
+    def f(self, x): ...
+
+def g(): ...
+
+[case testVariable]
+x = 1
+[out]
+x = ...  # type: int
+
+[case testMultipleVariable]
+x = y = 1
+[out]
+x = ...  # type: int
+y = ...  # type: int
+
+[case testClassVariable]
+class C:
+    x = 1
+[out]
+class C:
+    x = ...  # type: int
+
+[case testSelfAssignment]
+class C:
+    def __init__(self):
+        self.x = 1
+        x.y = 2
+[out]
+class C:
+    x = ...  # type: int
+    def __init__(self) -> None: ...
+
+[case testSelfAndClassBodyAssignment]
+x = 1
+class C:
+    x = 1
+    def __init__(self):
+        self.x = 1
+        self.x = 1
+[out]
+x = ...  # type: int
+
+class C:
+    x = ...  # type: int
+    def __init__(self) -> None: ...
+
+[case testEmptyClass]
+class A: ...
+[out]
+class A: ...
+
+[case testPrivateFunction]
+def _f(): ...
+def g(): ...
+[out]
+def g(): ...
+
+[case testPrivateMethod]
+class A:
+    def _f(self): ...
+[out]
+class A: ...
+
+[case testPrivateVar]
+_x = 1
+class A:
+    _y = 1
+[out]
+class A: ...
+
+[case testSpecialInternalVar]
+__all__ = []
+__author__ = ''
+__version__ = ''
+[out]
+
+[case testBaseClass]
+class A: ...
+class B(A): ...
+[out]
+class A: ...
+class B(A): ...
+
+[case testDecoratedFunction]
+@decorator
+def foo(x): ...
+[out]
+def foo(x): ...
+
+[case testMultipleAssignment]
+x, y = 1, 2
+[out]
+from typing import Any
+
+x = ...  # type: Any
+y = ...  # type: Any
+
+[case testMultipleAssignment2]
+[x, y] = 1, 2
+[out]
+from typing import Any
+
+x = ...  # type: Any
+y = ...  # type: Any
+
+[case testKeywordOnlyArg]
+def f(x, *, y=1): ...
+def g(x, *, y=1, z=2): ...
+[out]
+def f(x, *, y: int = ...): ...
+def g(x, *, y: int = ..., z: int = ...): ...
+
+[case testProperty]
+class A:
+    @property
+    def f(self):
+        return 1
+    @f.setter
+    def f(self, x): ...
+[out]
+class A:
+    @property
+    def f(self): ...
+    @f.setter
+    def f(self, x): ...
+
+[case testStaticMethod]
+class A:
+    @staticmethod
+    def f(x): ...
+[out]
+class A:
+    @staticmethod
+    def f(x): ...
+
+[case testClassMethod]
+class A:
+    @classmethod
+    def f(cls): ...
+[out]
+class A:
+    @classmethod
+    def f(cls): ...
+
+[case testIfMainCheck]
+def a(): ...
+if __name__ == '__main__':
+    x = 1
+    def f(): ...
+def b(): ...
+[out]
+def a(): ...
+def b(): ...
+
+[case testImportStar]
+from x import *
+from a.b import *
+def f(): ...
+[out]
+from x import *
+from a.b import *
+
+def f(): ...
+
+[case testNoSpacesBetweenEmptyClasses]
+class X:
+    def g(self): ...
+class A: ...
+class B: ...
+class C:
+    def f(self): ...
+[out]
+class X:
+    def g(self): ...
+
+class A: ...
+class B: ...
+
+class C:
+    def f(self): ...
+
+[case testExceptionBaseClasses]
+class A(Exception): ...
+class B(ValueError): ...
+[out]
+class A(Exception): ...
+class B(ValueError): ...
+
+[case testOmitSomeSpecialMethods]
+class A:
+    def __str__(self): ...
+    def __repr__(self): ...
+    def __eq__(self): ...
+    def __getstate__(self): ...
+    def __setstate__(self, state): ...
+[out]
+class A:
+    def __eq__(self): ...
+
+[case testOmitDefsNotInAll_import]
+__all__ = [] + ['f']
+def f(): ...
+def g(): ...
+[out]
+def f(): ...
+
+[case testVarDefsNotInAll_import]
+__all__ = [] + ['f', 'g']
+def f(): ...
+x = 1
+y = 1
+def g(): ...
+[out]
+def f(): ...
+def g(): ...
+
+[case testIncludeClassNotInAll_import]
+__all__ = [] + ['f']
+def f(): ...
+class A: ...
+[out]
+def f(): ...
+
+class A: ...
+
+[case testAllAndClass_import]
+__all__ = ['A']
+class A:
+    x = 1
+    def f(self): ...
+[out]
+class A:
+    x = ...  # type: int
+    def f(self): ...
+
+[case testMultiplePrivateDefs]
+class A: ...
+_x = 1
+_y = 1
+_z = 1
+class C: ...
+[out]
+class A: ...
+class C: ...
+
+[case testIncludeFromImportIfInAll_import]
+from re import match, search, sub
+__all__ = ['match', 'sub', 'x']
+x = 1
+[out]
+from re import match as match, sub as sub
+
+x = ...  # type: int
+
+[case testExportModule_import]
+import re
+__all__ = ['re', 'x']
+x = 1
+y = 2
+[out]
+import re as re
+
+x = ...  # type: int
+
+[case testExportModuleAs_import]
+import re as rex
+__all__ = ['rex', 'x']
+x = 1
+y = 2
+[out]
+import re as rex
+
+x = ...  # type: int
+
+[case testExportModuleInPackage_import]
+import urllib.parse as p
+__all__ = ['p']
+[out]
+import urllib.parse as p
+
+[case testExportModuleInPackageUnsupported_import]
+import urllib.parse
+__all__ = ['urllib']
+[out]
+# Names in __all__ with no definition:
+#   urllib
+
+[case testRelativeImportAll]
+from .x import *
+[out]
+from .x import *
+
+[case testCommentForUndefinedName_import]
+__all__ = ['f', 'x', 'C', 'g']
+def f(): ...
+x = 1
+class C:
+    def g(self): ...
+[out]
+def f(): ...
+
+x = ...  # type: int
+
+class C:
+    def g(self): ...
+
+# Names in __all__ with no definition:
+#   g
+
+[case testIgnoreSlots]
+class A:
+    __slots__ = ()
+[out]
+class A: ...
+
+[case testSkipPrivateProperty]
+class A:
+    @property
+    def _foo(self): ...
+[out]
+class A: ...
+
+[case testSkipPrivateStaticAndClassMethod]
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+[out]
+class A: ...
+
+[case testNamedtuple]
+import collections, x
+X = collections.namedtuple('X', ['a', 'b'])
+[out]
+from collections import namedtuple
+
+X = namedtuple('X', ['a', 'b'])
+
+[case testNamedtupleAltSyntax]
+from collections import namedtuple, x
+X = namedtuple('X', 'a b')
+[out]
+from collections import namedtuple
+
+X = namedtuple('X', 'a b')
+
+[case testNamedtupleWithUnderscore]
+from collections import namedtuple as _namedtuple
+def f(): ...
+X = _namedtuple('X', 'a b')
+def g(): ...
+[out]
+from collections import namedtuple as _namedtuple
+from collections import namedtuple
+
+def f(): ...
+
+X = namedtuple('X', 'a b')
+
+def g(): ...
+
+[case testNamedtupleBaseClass]
+import collections, x
+_X = collections.namedtuple('_X', ['a', 'b'])
+class Y(_X): ...
+[out]
+from collections import namedtuple
+
+_X = namedtuple('_X', ['a', 'b'])
+
+class Y(_X): ...
+
+[case testArbitraryBaseClass]
+import x
+class D(x.C): ...
+[out]
+import x
+
+class D(x.C): ...
+
+[case testArbitraryBaseClass2]
+import x.y
+class D(x.y.C): ...
+[out]
+import x.y
+
+class D(x.y.C): ...
+
+[case testUnqualifiedArbitraryBaseClassWithNoDef]
+class A(int): ...
+[out]
+class A(int): ...
+
+[case testUnqualifiedArbitraryBaseClass]
+from x import X
+class A(X): ...
+[out]
+from x import X
+
+class A(X): ...
+
+[case testUnqualifiedArbitraryBaseClassWithImportAs]
+from x import X as _X
+class A(_X): ...
+[out]
+from x import X as _X
+
+class A(_X): ...
+
+[case testObjectBaseClass]
+class A(object): ...
+[out]
+class A: ...
+
+[case testEmptyLines]
+def x(): ...
+def f():
+    class A:
+        def f(self):
+            self.x = 1
+def g(): ...
+[out]
+def x(): ...
+def f(): ...
+def g(): ...
+
+[case testNestedClass]
+class A:
+    class B:
+        x = 1
+        def f(self): ...
+    def g(self): ...
+[out]
+class A:
+    class B:
+        x = ...  # type: int
+        def f(self): ...
+    def g(self): ...
+
+[case testExportViaRelativeImport]
+from .api import get
+[out]
+from .api import get as get
+
+[case testExportViaRelativePackageImport]
+from .packages.urllib3.contrib import parse
+[out]
+from .packages.urllib3.contrib import parse as parse
+
+[case testNoExportViaRelativeImport]
+from . import get
+[out]
+
+[case testRelativeImportAndBase]
+from .x import X
+class A(X):
+     pass
+[out]
+from .x import X as X
+
+class A(X): ...
+
+[case testDuplicateDef]
+def syslog(a): pass
+def syslog(a): pass
+[out]
+def syslog(a): ...
+
+[case testAsyncAwait_fast_parser]
+async def f(a):
+   x = await y
+[out]
+def f(a): ...
+
+[case testInferOptionalOnlyFunc]
+class A:
+    x = None
+    def __init__(self, a=None) -> None:
+        self.x = []
+[out]
+from typing import Any, Optional
+
+class A:
+    x = ...  # type: Any
+    def __init__(self, a: Optional[Any] = ...) -> None: ...
+
+-- More features/fixes:
+--   do not export deleted names
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test
new file mode 100644
index 0000000..a51d844
--- /dev/null
+++ b/test-data/unit/typexport-basic.test
@@ -0,0 +1,1159 @@
+-- Test cases for exporting node types from the type checker.
+--
+-- Each test case consists of at least two sections.
+-- The first section contains [case NAME-skip] followed by the input code,
+-- while the second section contains [out] followed by the output from the type
+-- checker.
+--
+-- The first line of input code should be a regexp in comment that describes
+-- the information to dump (prefix with ##). The regexp is matched against
+-- the following items:
+--
+--   * each name of an expression node
+--   * each type string of a node (e.g. OpExpr)
+--
+-- Lines starting with "--" in this file will be ignored.
+
+
+-- Expressions
+-- -----------
+
+
+[case testConstructorCall]
+import typing
+A()
+B()
+class A: pass
+class B: pass
+[out]
+CallExpr(2) : A
+NameExpr(2) : def () -> A
+CallExpr(3) : B
+NameExpr(3) : def () -> B
+
+[case testLiterals]
+import typing
+5
+2.3
+'foo'
+[builtins fixtures/primitives.pyi]
+[out]
+IntExpr(2) : builtins.int
+FloatExpr(3) : builtins.float
+StrExpr(4) : builtins.str
+
+[case testNameExpression]
+
+a = None # type: A
+a # node
+def f(aa: 'A') -> None:
+  b = None # type: B
+  aa # node
+  b  # node
+class A:
+  def g(self) -> None:
+    self # node
+class B: pass
+[out]
+NameExpr(3) : A
+NameExpr(6) : A
+NameExpr(7) : B
+NameExpr(10) : A
+
+[case testEllipsis]
+import typing
+...
+[out]
+EllipsisExpr(2) : builtins.ellipsis
+
+[case testMemberAccess]
+## MemberExpr|CallExpr
+
+a = None # type: A
+a.m
+a.f
+a.f()
+class A:
+  m = None # type: A
+  def f(self) -> 'B': pass
+class B: pass
+[out]
+MemberExpr(4) : A
+MemberExpr(5) : def () -> B
+CallExpr(6) : B
+MemberExpr(6) : def () -> B
+
+[case testCastExpression]
+## CastExpr|[a-z]
+from typing import Any, cast
+d = None # type: Any
+b = None # type: B
+class A: pass
+class B(A): pass
+cast(A, d)
+cast(A, b)
+cast(B, b)
+[out]
+CastExpr(7) : A
+NameExpr(7) : Any
+CastExpr(8) : A
+NameExpr(8) : B
+CastExpr(9) : B
+NameExpr(9) : B
+
+[case testArithmeticOps]
+## OpExpr
+import typing
+a = 1 + 2
+1.2 * 3
+2.2 - 3
+1 / 2
+[file builtins.py]
+class object:
+    def __init__(self) -> None: pass
+class function: pass
+class int:
+    def __add__(self, x: int) -> int: pass
+    def __truediv__(self, x: int) -> float: pass
+class float:
+    def __mul__(self, x: int) -> float: pass
+    def __sub__(self, x: int) -> float: pass
+class type: pass
+class str: pass
+[out]
+OpExpr(3) : builtins.int
+OpExpr(4) : builtins.float
+OpExpr(5) : builtins.float
+OpExpr(6) : builtins.float
+
+[case testComparisonOps]
+## ComparisonExpr
+import typing
+1 == object()
+1 == 2
+2 < 3
+1 < 2 < 3
+8 > 3
+4 < 6 > 2
+[file builtins.py]
+class object:
+    def __init__(self) -> None: pass
+class int:
+    def __eq__(self, x: object) -> bool: pass
+    def __lt__(self, x: int) -> bool: pass
+    def __gt__(self, x: int) -> int: pass
+class bool: pass
+class type: pass
+class function: pass
+class str: pass
+[out]
+ComparisonExpr(3) : builtins.bool
+ComparisonExpr(4) : builtins.bool
+ComparisonExpr(5) : builtins.bool
+ComparisonExpr(6) : builtins.bool
+ComparisonExpr(7) : builtins.int
+ComparisonExpr(8) : builtins.object
+
+[case testBooleanOps]
+## OpExpr|UnaryExpr
+import typing
+a = 1
+a and a
+a or a
+not a
+[builtins fixtures/bool.pyi]
+[out]
+OpExpr(4) : builtins.int
+OpExpr(5) : builtins.int
+UnaryExpr(6) : builtins.bool
+
+[case testBooleanOpsOnBools]
+## OpExpr|UnaryExpr
+import typing
+a = bool()
+a and a
+a or a
+not a
+[builtins fixtures/bool.pyi]
+[out]
+OpExpr(4) : builtins.bool
+OpExpr(5) : builtins.bool
+UnaryExpr(6) : builtins.bool
+
+[case testFunctionCall]
+## CallExpr
+from typing import Tuple
+f(
+  A(),
+  B())
+class A: pass
+class B: pass
+def f(a: A, b: B) -> Tuple[A, B]: pass
+[builtins fixtures/tuple-simple.pyi]
+[out]
+CallExpr(3) : Tuple[A, B]
+CallExpr(4) : A
+CallExpr(5) : B
+
+
+-- Statements
+-- ----------
+
+
+[case testSimpleAssignment]
+from typing import Any
+a = None # type: A
+b = a # type: Any
+b = a
+a = b
+
+class A: pass
+[out]
+NameExpr(3) : A
+NameExpr(4) : A
+NameExpr(4) : Any
+NameExpr(5) : A
+NameExpr(5) : Any
+
+[case testMemberAssignment]
+from typing import Any
+class A:
+  a = None # type: A
+  b = None # type: Any
+  def f(self) -> None:
+    self.b = self.a
+    self.a.a = self.b
+[out]
+MemberExpr(6) : A
+MemberExpr(6) : Any
+NameExpr(6) : A
+NameExpr(6) : A
+MemberExpr(7) : A
+MemberExpr(7) : A
+MemberExpr(7) : A
+NameExpr(7) : A
+NameExpr(7) : A
+
+[case testIf]
+
+a = None # type: bool
+if a:
+  1
+elif not a:
+  1
+[builtins fixtures/bool.pyi]
+[out]
+NameExpr(3) : builtins.bool
+IntExpr(4) : builtins.int
+NameExpr(5) : builtins.bool
+UnaryExpr(5) : builtins.bool
+IntExpr(6) : builtins.int
+
+[case testWhile]
+
+a = None # type: bool
+while a:
+  a
+[builtins fixtures/bool.pyi]
+[out]
+NameExpr(3) : builtins.bool
+NameExpr(4) : builtins.bool
+
+
+-- Simple type inference
+-- ---------------------
+
+
+[case testInferSingleType]
+import typing
+x = ()
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(2) : Tuple[]
+TupleExpr(2) : Tuple[]
+
+[case testInferTwoTypes]
+## NameExpr
+import typing
+(s,
+i) = 'x', 1
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(3) : builtins.str
+NameExpr(4) : builtins.int
+
+[case testInferSingleLocalVarType]
+import typing
+def f() -> None:
+    x = ()
+[builtins fixtures/primitives.pyi]
+[out]
+NameExpr(3) : Tuple[]
+TupleExpr(3) : Tuple[]
+
+
+-- Basic generics
+-- --------------
+
+
+[case testImplicitBoundTypeVarsForMethod]
+## MemberExpr
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def f(self) -> T: pass
+class B: pass
+def g() -> None:
+  a = None # type: A[B]
+  f = a.f
+[out]
+MemberExpr(9) : def () -> B
+
+[case testImplicitBoundTypeVarsForSelfMethodReference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def f(self) -> T:
+    self.f()
+[out]
+CallExpr(5) : T`1
+MemberExpr(5) : def () -> T`1
+NameExpr(5) : A[T`1]
+
+[case testGenericFunctionCallWithTypeApp-skip]
+## CallExpr|TypeApplication|NameExpr
+from typing import Any, TypeVar, Tuple
+T = TypeVar('T')
+class A: pass
+f[A](A())
+f[Any](A())
+def f(a: T) -> Tuple[T, T]: pass
+[builtins fixtures/tuple.pyi]
+[out]
+CallExpr(5) : A
+CallExpr(5) : Tuple[A, A]
+NameExpr(5) : def () -> A
+NameExpr(5) : def (a: A) -> Tuple[A, A]
+TypeApplication(5) : def (a: A) -> Tuple[A, A]
+CallExpr(6) : A
+CallExpr(6) : Tuple[Any, Any]
+NameExpr(6) : def () -> A
+NameExpr(6) : def (a: Any) -> Tuple[Any, Any]
+TypeApplication(6) : def (a: Any) -> Tuple[Any, Any]
+
+-- NOTE: Type applications are not supported for generic methods, so the
+--       following test cases are commented out.
+
+--[case testGenericMethodCallWithTypeApp]
+--## CallExpr|MemberExpr|TypeApplication
+--from typing import Any, TypeVar, Tuple
+--T = TypeVar('T')
+--class A:
+--  def f(self, a: T) -> Tuple[T, T]: pass
+--a.f[A](a)
+--a.f[Any](a)
+--a = None # type: A
+--[builtins fixtures/tuple.py]
+--[out]
+--CallExpr(2) : Tuple[A, A]
+--MemberExpr(2) : def (A a) -> Tuple[A, A]
+--TypeApplication(2) : def (A a) -> Tuple[A, A]
+--CallExpr(3) : Tuple[Any, Any]
+--MemberExpr(3) : def (any a) -> Tuple[Any, Any]
+--TypeApplication(3) : def (any a) -> Tuple[Any, Any]
+
+--[case testGenericMethodCallInGenericTypeWithTypeApp]
+--## CallExpr|MemberExpr|TypeApplication
+--from typing import Any, TypeVar, Generic, Tuple
+--T = TypeVar('T')
+--S = TypeVar('S')
+--class B: pass
+--class C: pass
+--a.f[B](b)
+--a.f[Any](b)
+--class A(Generic[T]):
+--  def f(self, a: S) -> Tuple[T, S]: pass
+--a = None # type: A[C]
+--b = None # type: B
+--[builtins fixtures/tuple.py]
+--[out]
+--CallExpr(6) : Tuple[C, B]
+--MemberExpr(6) : def (B a) -> Tuple[C, B]
+--TypeApplication(6) : def (B a) -> Tuple[C, B]
+--CallExpr(7) : Tuple[C, Any]
+--MemberExpr(7) : def (any a) -> Tuple[C, Any]
+--TypeApplication(7) : def (any a) -> Tuple[C, Any]
+
+[case testGenericTypeVariableInference]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class A(Generic[T]):
+  def __init__(self, a: T) -> None: pass
+class B: pass
+A(A(B()))
+[out]
+CallExpr(6) : A[A[B]]
+CallExpr(6) : A[B]
+CallExpr(6) : B
+NameExpr(6) : def (a: A[B]) -> A[A[B]]
+NameExpr(6) : def (a: B) -> A[B]
+NameExpr(6) : def () -> B
+
+
+-- Generic inheritance
+-- -------------------
+
+
+[case testInheritedMethodReferenceWithGenericInheritance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class A(Generic[T]):
+  def f(self, a: T) -> None: pass
+class B(A[C]):
+  def g(self, c: C) -> None:
+    self.f(c)
+[out]
+CallExpr(8) : void
+MemberExpr(8) : def (a: C)
+NameExpr(8) : C
+NameExpr(8) : B
+
+[case testInheritedMethodReferenceWithGenericSubclass]
+from typing import TypeVar, Generic
+S = TypeVar('S')
+T = TypeVar('T')
+class C: pass
+class A(Generic[S, T]):
+  def f(self, a: C) -> None: pass
+class B(A[C, T], Generic[T]):
+  def g(self, c: C) -> None:
+    self.f(c)
+[out]
+CallExpr(9) : void
+MemberExpr(9) : def (a: C)
+NameExpr(9) : C
+NameExpr(9) : B[T`1]
+
+[case testExternalReferenceWithGenericInheritance]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+class C: pass
+class A(Generic[T]):
+  def f(self, a: T) -> None: pass
+class B(A[C]): pass
+b = None # type: B
+c = None # type: C
+b.f(c)
+[out]
+CallExpr(9) : void
+MemberExpr(9) : def (a: C)
+NameExpr(9) : B
+NameExpr(9) : C
+
+
+-- Implicit Any types
+-- ------------------
+
+
+[case testDynamicallyTypedFunction]
+
+def f(x):
+  y = x + o
+  z = o
+  z
+o = None # type: object
+[out]
+NameExpr(3) : builtins.object
+NameExpr(3) : Any
+NameExpr(3) : Any
+OpExpr(3) : Any
+NameExpr(4) : builtins.object
+NameExpr(4) : Any
+NameExpr(5) : Any
+
+[case testDynamicallyTypedMethod]
+
+class A:
+  def f(self, x):
+    y = (
+         o)  # Place y and o on separate lines
+    x
+    y
+o = None # type: object
+[out]
+NameExpr(4) : Any
+NameExpr(5) : builtins.object
+NameExpr(6) : Any
+NameExpr(7) : Any
+
+[case testDynamicallyTypedConstructor]
+
+class A:
+  def __init__(self, x):
+    y = o
+    x
+    y
+o = None # type: object
+[out]
+NameExpr(4) : builtins.object
+NameExpr(4) : Any
+NameExpr(5) : Any
+NameExpr(6) : Any
+
+[case testCallInDynamicallyTypedFunction]
+
+def f():
+  g(o)
+def g(a: object) -> object: pass
+o = None # type: object
+[out]
+CallExpr(3) : Any
+NameExpr(3) : def (a: builtins.object) -> builtins.object
+NameExpr(3) : builtins.object
+
+[case testExpressionInDynamicallyTypedFn]
+import typing
+def f():
+  x = None
+  x.f()
+[out]
+CallExpr(4) : Any
+MemberExpr(4) : Any
+NameExpr(4) : Any
+
+[case testGenericCall]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f() -> None:
+  a1 = A(b) # type: A[B]
+  a2 = A(b) # type: A[object]
+class A(Generic[T]):
+  def __init__(self, a: T) -> None: pass
+class B: pass
+b = None # type: B
+[out]
+CallExpr(4) : A[B]
+NameExpr(4) : def (a: B) -> A[B]
+NameExpr(4) : B
+CallExpr(5) : A[builtins.object]
+NameExpr(5) : def (a: builtins.object) -> A[builtins.object]
+NameExpr(5) : B
+
+[case testGenericCallInDynamicallyTypedFunction]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f():
+  A()
+class A(Generic[T]): pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [T] () -> A[T`1]
+
+[case testGenericCallInDynamicallyTypedFunction2]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f():
+  A(f)
+class A(Generic[T]):
+    def __init__(self, x: T) -> None: pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [T] (x: T`1) -> A[T`1]
+NameExpr(4) : def () -> Any
+
+[case testGenericCallInDynamicallyTypedFunction3]
+from typing import TypeVar
+t = TypeVar('t')
+def f():
+  g(None)
+def g(x: t) -> t: pass
+[out]
+CallExpr(4) : Any
+NameExpr(4) : def [t] (x: t`-1) -> t`-1
+
+
+-- Generic types and type inference
+-- --------------------------------
+
+
+[case testInferenceInArgumentContext]
+## CallExpr
+from typing import TypeVar, Generic
+T = TypeVar('T')
+f(g())
+f(h(b))
+f(h(c))
+
+b = None # type: B
+c = None # type: C
+
+def f(a: 'A[B]') -> None: pass
+
+def g() -> 'A[T]': pass
+def h(a: T) -> 'A[T]': pass
+
+class A(Generic[T]): pass
+class B: pass
+class C(B): pass
+[out]
+CallExpr(4) : void
+CallExpr(4) : A[B]
+CallExpr(5) : void
+CallExpr(5) : A[B]
+CallExpr(6) : void
+CallExpr(6) : A[B]
+
+[case testInferGenericTypeForLocalVariable]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+def f() -> None:
+  a = A(b)
+  a
+  a2, a3 = A(b), A(c)
+  a2
+  a3
+b = None # type: B
+c = None # type: C
+class A(Generic[T]):
+  def __init__(self, x: T) -> None: pass
+class B: pass
+class C: pass
+[out]
+CallExpr(4) : A[B]
+NameExpr(4) : def (x: B) -> A[B]
+NameExpr(4) : A[B]
+NameExpr(4) : B
+NameExpr(5) : A[B]
+CallExpr(6) : A[B]
+CallExpr(6) : A[C]
+NameExpr(6) : def (x: B) -> A[B]
+NameExpr(6) : def (x: C) -> A[C]
+NameExpr(6) : A[B]
+NameExpr(6) : A[C]
+NameExpr(6) : B
+NameExpr(6) : C
+NameExpr(7) : A[B]
+NameExpr(8) : A[C]
+
+[case testNestedGenericCalls]
+from typing import TypeVar, Generic
+T = TypeVar('T')
+S = TypeVar('S')
+def h() -> None:
+  g(f(c))
+
+c = None # type: C
+
+class A(Generic[T]): pass
+class B(Generic[T]): pass
+class C: pass
+def f(a: T) -> A[T]: pass
+def g(a: S) -> B[S]: pass
+[out]
+CallExpr(5) : A[C]
+CallExpr(5) : B[A[C]]
+NameExpr(5) : C
+NameExpr(5) : def (a: C) -> A[C]
+NameExpr(5) : def (a: A[C]) -> B[A[C]]
+
+[case testInferListLiterals]
+from typing import List
+a = [] # type: List[A]
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(2) : builtins.list[A]
+
+[case testInferGenericTypeInTypeAnyContext]
+from typing import Any
+a = [] # type: Any
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(2) : builtins.list[Any]
+
+[case testHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+map(
+    f,
+    [A()])
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(4) : builtins.list[B]
+NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+NameExpr(5) : def (a: A) -> B
+CallExpr(6) : A
+ListExpr(6) : builtins.list[A]
+NameExpr(6) : def () -> A
+
+
+-- Lambdas
+-- -------
+
+
+[case testLambdaWithTypeInferredFromContext]
+from typing import Callable
+f = lambda x: x.a # type: Callable[[B], A]
+class A: pass
+class B:
+  a = None # type: A
+[out]
+FuncExpr(2) : def (B) -> A
+MemberExpr(2) : A
+NameExpr(2) : B
+
+[case testLambdaWithInferredType]
+## FuncExpr|NameExpr
+import typing
+f = lambda: 1
+[out]
+FuncExpr(3) : def () -> builtins.int
+NameExpr(3) : def () -> builtins.int
+
+[case testLambdaWithInferredType2]
+## FuncExpr|NameExpr
+import typing
+f = lambda: [1]
+[builtins fixtures/list.pyi]
+[out]
+FuncExpr(3) : def () -> builtins.list[builtins.int]
+NameExpr(3) : def () -> builtins.list[builtins.int]
+
+[case testLambdaWithInferredType3]
+from typing import List, Callable
+f = lambda x: [] # type: Callable[[B], List[A]]
+class A: pass
+class B:
+  a = None # type: A
+[builtins fixtures/list.pyi]
+[out]
+FuncExpr(2) : def (B) -> builtins.list[A]
+ListExpr(2) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: f(x), l)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+CallExpr(6) : B
+FuncExpr(6) : def (A) -> B
+NameExpr(6) : def (a: A) -> B
+NameExpr(6) : builtins.list[A]
+NameExpr(6) : A
+
+[case testLambdaAndHigherOrderFunction2]
+## FuncExpr|NameExpr|ListExpr
+from typing import TypeVar, List, Callable
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: [f(x)], l)
+def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass
+class A: pass
+class B: pass
+def f(a: A) -> B: pass
+[builtins fixtures/list.pyi]
+[out]
+NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B]
+FuncExpr(7) : def (A) -> builtins.list[B]
+ListExpr(7) : builtins.list[B]
+NameExpr(7) : def (a: A) -> B
+NameExpr(7) : builtins.list[A]
+NameExpr(7) : A
+
+[case testLambdaInListAndHigherOrderFunction]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  [lambda x: x],
+  l)
+def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+-- TODO We probably should not silently infer 'Any' types in statically typed
+--      context. Perhaps just fail instead?
+CallExpr(5) : builtins.list[Any]
+NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any]
+FuncExpr(6) : def (A) -> A
+ListExpr(6) : builtins.list[def (A) -> Any]
+NameExpr(6) : A
+NameExpr(7) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunction3]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  lambda x: x.b,
+  l)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A:
+  b = None # type: B
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+FuncExpr(6) : def (A) -> B
+MemberExpr(6) : B
+NameExpr(6) : A
+NameExpr(7) : builtins.list[A]
+
+[case testLambdaAndHigherOrderFunctionAndKeywordArgs]
+from typing import TypeVar, Callable, List
+t = TypeVar('t')
+s = TypeVar('s')
+l = None # type: List[A]
+map(
+  a=l,
+  f=lambda x: x.b)
+def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass
+class A:
+  b = None # type: B
+class B: pass
+[builtins fixtures/list.pyi]
+[out]
+CallExpr(5) : builtins.list[B]
+NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
+NameExpr(6) : builtins.list[A]
+FuncExpr(7) : def (A) -> B
+MemberExpr(7) : B
+NameExpr(7) : A
+
+
+-- Boolean operations
+-- ------------------
+
+
+[case testBooleanOr]
+from typing import List
+a = None # type: List[A]
+a or []
+a = a or []
+a = [] or a
+class A: pass
+[builtins fixtures/list.pyi]
+[out]
+ListExpr(3) : builtins.list[A]
+NameExpr(3) : builtins.list[A]
+OpExpr(3) : builtins.list[A]
+ListExpr(4) : builtins.list[A]
+NameExpr(4) : builtins.list[A]
+NameExpr(4) : builtins.list[A]
+OpExpr(4) : builtins.list[A]
+ListExpr(5) : builtins.list[A]
+NameExpr(5) : builtins.list[A]
+NameExpr(5) : builtins.list[A]
+OpExpr(5) : builtins.list[A]
+
+
+-- Class attributes
+-- ----------------
+
+
+[case testUnboundMethod]
+## MemberExpr
+import typing
+class A:
+    def f(self) -> None: pass
+A.f
+[out]
+MemberExpr(5) : def (self: A)
+
+[case testUnboundMethodWithImplicitSig]
+## MemberExpr
+import typing
+class A:
+    def f(self): pass
+A.f
+[out]
+MemberExpr(5) : def (self: Any) -> Any
+
+[case testOverloadedUnboundMethod]
+## MemberExpr
+from typing import overload
+class A:
+    @overload
+    def f(self) -> None: pass
+    @overload
+    def f(self, x: object) -> None: pass
+A.f
+[out]
+MemberExpr(8) : Overload(def (self: A), def (self: A, x: builtins.object))
+
+[case testOverloadedUnboundMethodWithImplicitSig]
+## MemberExpr
+from typing import overload
+class A:
+    @overload
+    def f(self): pass
+    @overload
+    def f(self, x): pass
+A.f
+[out]
+MemberExpr(8) : Overload(def (self: Any) -> Any, def (self: Any, x: Any) -> Any)
+
+[case testUnboundMethodWithInheritance]
+## MemberExpr
+import typing
+class A:
+    def __init__(self) -> None: pass
+    def f(self) -> None: pass
+class B(A):
+    pass
+B.f
+[out]
+MemberExpr(8) : def (self: A)
+
+[case testUnboundGenericMethod]
+## MemberExpr
+from typing import TypeVar
+t = TypeVar('t')
+class B: pass
+class A:
+    def f(self, x: t) -> None: pass
+A.f(A(), B())
+[out]
+MemberExpr(7) : def (self: A, x: B)
+
+[case testUnboundMethodOfGenericClass]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x: t) -> None: pass
+A.f
+a_b = A() # type: A[B]
+A.f(a_b, B())
+[out]
+MemberExpr(7) : def [t] (self: A[t`1], x: t`1)
+MemberExpr(9) : def (self: A[B], x: B)
+
+[case testUnboundOverloadedMethodOfGenericClass]
+## CallExpr
+from typing import TypeVar, Generic, overload
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    @overload
+    def f(self, x: t) -> t: pass
+    @overload
+    def f(self) -> object: pass
+ab, b = None, None # type: (A[B], B)
+A.f(ab, b)
+[out]
+CallExpr(11) : B
+
+[case testUnboundMethodOfGenericClassWithImplicitSig]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+class B: pass
+class A(Generic[t]):
+    def f(self, x): pass
+A.f(None, None)
+[out]
+MemberExpr(7) : def (self: Any, x: Any) -> Any
+
+[case testGenericMethodOfGenericClass]
+## MemberExpr
+from typing import TypeVar, Generic
+t = TypeVar('t')
+s = TypeVar('s')
+class B: pass
+class A(Generic[t]):
+    def f(self, y: s) -> None: pass
+ab = None # type: A[B]
+o = None # type: object
+A.f(ab, o)
+[out]
+MemberExpr(10) : def (self: A[B], y: builtins.object)
+
+
+-- Type variables with value restriction
+-- -------------------------------------
+
+
+[case testTypeVariableWithValueRestriction]
+## NameExpr
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> None: pass
+f(1)
+f('x')
+[out]
+NameExpr(5) : def (x: builtins.int)
+NameExpr(6) : def (x: builtins.str)
+
+[case testTypeVariableWithValueRestrictionAndSubtype]
+## NameExpr|CallExpr
+from typing import TypeVar
+T = TypeVar('T', int, str)
+def f(x: T) -> T: pass
+class S(str): pass
+s = None # type: S
+f(s)
+[out]
+CallExpr(7) : builtins.str
+NameExpr(7) : def (x: builtins.str) -> builtins.str
+NameExpr(7) : S
+
+
+-- Binary operations
+-- -----------------
+
+
+[case testBinaryOperatorWithAnyLeftOperand]
+## OpExpr
+from typing import Any, cast
+class B:
+    def __add__(self, x: int) -> str: pass
+class A:
+    def __radd__(self, x: B) -> int: pass
+cast(Any, 1) + A()
+B() + A()
+[out]
+OpExpr(7) : Any
+OpExpr(8) : builtins.int
+
+[case testBinaryOperatorWithAnyRightOperand]
+## OpExpr
+from typing import Any, cast
+class A:
+    def __add__(self, x: str) -> int: pass
+A() + cast(Any, 1)
+[out]
+OpExpr(5) : Any
+
+
+-- Callable overloading
+-- --------------------
+
+
+[case testOverloadedFunctionType]
+## CallExpr
+from typing import overload
+@overload
+def f(x: int) -> str: pass
+@overload
+def f(x: str) -> int: pass
+f(1)
+f('')
+[out]
+CallExpr(7) : builtins.str
+CallExpr(8) : builtins.int
+
+[case testOverlappingOverloadedFunctionType]
+## CallExpr
+from typing import overload
+class A: pass
+class B(A): pass
+ at overload
+def f(x: B) -> B: pass
+ at overload
+def f(x: A) -> A: pass
+a = None # type: A
+b = None # type: B
+f(a)
+f(b)
+[out]
+CallExpr(11) : A
+CallExpr(12) : B
+
+
+
+[case testOverloadedErasedType]
+from typing import Callable
+from typing import List
+from typing import overload
+from typing import TypeVar
+
+T = TypeVar("T")
+V = TypeVar("V")
+
+def fun(s: int) -> int: pass
+
+def m(fun: Callable[[T], V], iter: List[T]) -> None: pass
+
+nums = [1] # type: List[int]
+m(fun,
+  nums)
+[builtins fixtures/list.pyi]
+[out]
+IntExpr(13) : builtins.int
+ListExpr(13) : builtins.list[builtins.int]
+CallExpr(14) : void
+NameExpr(14) : def (s: builtins.int) -> builtins.int
+NameExpr(14) : def (fun: def (builtins.int) -> builtins.int, iter: builtins.list[builtins.int])
+NameExpr(15) : builtins.list[builtins.int]
+
+
+-- Special cases
+-- -------------
+
+
+[case testImplicitDataAttributeInit]
+## NameExpr
+import typing
+class A:
+    def __init__(self) -> None:
+        self.x = (
+                  A())
+[out]
+NameExpr(5) : A
+NameExpr(6) : def () -> A
+
+[case testListMultiplicationInContext]
+## ListExpr|OpExpr|IntExpr
+from typing import List
+a = [None] * 3 # type: List[str]
+[builtins fixtures/list.pyi]
+[out]
+IntExpr(3) : builtins.int
+ListExpr(3) : builtins.list[builtins.str]
+OpExpr(3) : builtins.list[builtins.str]
+
+
+-- TODO
+--
+-- test expressions
+--   list literal
+--   tuple literal
+--   unary minus
+--   indexing
+--   super expression
+--   more complex lambda (multiple arguments etc.)
+--   list comprehension
+--   generator expression
+-- overloads
+-- other things
+--   type inference
+--   default argument value
+--   for loop variable
+--   exception variable
+--   varargs
+-- generics
+--   explicit types
+-- type of 'None' (currently stripped, but sometimes we may want to dump it)
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..5ce8f80
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,9 @@
+flake8
+flake8-bugbear; python_version >= '3.5'
+flake8-pyi; python_version >= '3.5'
+lxml; sys_platform != 'win32'
+typed-ast>=0.6.3; sys_platform != 'win32' or python_version >= '3.5'
+pytest>=2.8
+pytest-xdist>=1.13
+pytest-cov>=2.4.0
+typing>=3.5.2; python_version < '3.5'
diff --git a/tmp-test-dirs/.gitignore b/tmp-test-dirs/.gitignore
new file mode 100644
index 0000000..e6579d8
--- /dev/null
+++ b/tmp-test-dirs/.gitignore
@@ -0,0 +1,4 @@
+# This directory is used to store temporary directories for the testsuite.
+# If anything manages to exist here, it means Python crashed instead of
+# calling tempfile.TemporaryDirectory's cleanup while unwinding.
+# Therefore, don't actually provide any ignore patterns.
diff --git a/typeshed/stdlib/2/UserDict.pyi b/typeshed/stdlib/2/UserDict.pyi
index a30c2e9..885a62a 100644
--- a/typeshed/stdlib/2/UserDict.pyi
+++ b/typeshed/stdlib/2/UserDict.pyi
@@ -1,8 +1,9 @@
 from typing import (Any, Container, Dict, Generic, Iterable, Iterator, List,
-                    Mapping, Sized, Tuple, TypeVar, overload)
+                    Mapping, Optional, Sized, Tuple, TypeVar, Union, overload)
 
 _KT = TypeVar('_KT')
 _VT = TypeVar('_VT')
+_T = TypeVar('_T')
 
 class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]):
     data = ...  # type: Mapping[_KT, _VT]
@@ -14,12 +15,15 @@ class UserDict(Dict[_KT, _VT], Generic[_KT, _VT]):
 class IterableUserDict(UserDict[_KT, _VT], Generic[_KT, _VT]):
     ...
 
-class DictMixin(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT]):
+class DictMixin(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT]):
     def has_key(self, key: _KT) -> bool: ...
 
     # From  typing.Mapping[_KT, _VT]
     # (can't inherit because of keys())
-    def get(self, k: _KT, default: _VT = ...) -> _VT: ...
+    @overload
+    def get(self, k: _KT) -> Optional[_VT]: ...
+    @overload
+    def get(self, k: _KT, default: Union[_VT, _T]) -> Union[_VT, _T]: ...
     def values(self) -> List[_VT]: ...
     def items(self) -> List[Tuple[_KT, _VT]]: ...
     def iterkeys(self) -> Iterator[_KT]: ...
diff --git a/typeshed/stdlib/2/__builtin__.pyi b/typeshed/stdlib/2/__builtin__.pyi
index e8d4ae0..59b01e5 100644
--- a/typeshed/stdlib/2/__builtin__.pyi
+++ b/typeshed/stdlib/2/__builtin__.pyi
@@ -43,8 +43,9 @@ class object:
     def __format__(self, format_spec: str) -> str: ...
     def __getattribute__(self, name: str) -> Any: ...
     def __delattr__(self, name: str) -> None: ...
+    def __sizeof__(self) -> int: ...
 
-class type:
+class type(object):
     __bases__ = ...  # type: Tuple[type, ...]
     __name__ = ...  # type: str
     __module__ = ...  # type: str
@@ -67,6 +68,8 @@ class type:
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
     def __subclasses__(self: _TT) -> List[_TT]: ...
+    def __instancecheck__(self, instance: Any) -> bool: ...
+    def __subclasscheck__(self, subclass: type) -> bool: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     @overload
@@ -119,6 +122,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __int__(self) -> int: ...
     def __abs__(self) -> int: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
 
 class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     @overload
@@ -166,6 +170,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -201,6 +206,13 @@ class complex(SupportsAbs[float]):
     def __str__(self) -> str: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
+
+class super(object):
+    @overload
+    def __init__(self, t: Any, obj: Any) -> None: ...
+    @overload
+    def __init__(self, t: Any) -> None: ...
 
 class basestring(metaclass=ABCMeta): ...
 
@@ -282,7 +294,7 @@ class unicode(basestring, Sequence[unicode]):
     def __hash__(self) -> int: ...
 
 class str(basestring, Sequence[str]):
-    def __init__(self, object: object) -> None: ...
+    def __init__(self, object: object='') -> None: ...
     def capitalize(self) -> str: ...
     def center(self, width: int, fillchar: str = ...) -> str: ...
     def count(self, x: unicode) -> int: ...
@@ -453,11 +465,14 @@ class bytearray(MutableSequence[int]):
 class bool(int, SupportsInt, SupportsFloat):
     def __init__(self, o: object = ...) -> None: ...
 
-class slice:
-    start = 0
-    step = 0
-    stop = 0
-    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+class slice(object):
+    start = ...  # type: Optional[int]
+    step = ...  # type: Optional[int]
+    stop = ...  # type: Optional[int]
+    @overload
+    def __init__(self, stop: int = None) -> None: ...
+    @overload
+    def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
     def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -538,7 +553,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def has_key(self, k: _KT) -> bool: ...
     def clear(self) -> None: ...
     def copy(self) -> Dict[_KT, _VT]: ...
-    def get(self, k: _KT, default: _VT = None) -> _VT: ...
     def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
@@ -546,9 +560,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
     def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
-    def keys(self) -> List[_KT]: ...
-    def values(self) -> List[_VT]: ...
-    def items(self) -> List[Tuple[_KT, _VT]]: ...
     def iterkeys(self) -> Iterator[_KT]: ...
     def itervalues(self) -> Iterator[_VT]: ...
     def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
@@ -565,7 +576,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __getitem__(self, k: _KT) -> _VT: ...
     def __setitem__(self, k: _KT, v: _VT) -> None: ...
     def __delitem__(self, v: _KT) -> None: ...
-    def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_KT]: ...
     def __str__(self) -> str: ...
 
@@ -696,7 +706,7 @@ def hasattr(o: Any, name: unicode) -> bool: ...
 def hash(o: object) -> int: ...
 def hex(i: int) -> str: ...  # TODO __index__
 def id(o: object) -> int: ...
-def input(prompt: unicode = ...) -> Any: ...
+def input(prompt: Any = ...) -> Any: ...
 def intern(string: str) -> str: ...
 @overload
 def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
@@ -722,7 +732,7 @@ def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
-def next(i: Iterator[_T], default: _T) -> _T: ...
+def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
 def oct(i: int) -> str: ...  # TODO __index__
 @overload
 def open(file: str, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
@@ -744,7 +754,7 @@ def pow(x: float, y: float) -> float: ...
 def pow(x: float, y: float, z: float) -> float: ...
 def quit(code: int = ...) -> None: ...
 def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
-def raw_input(prompt: unicode = ...) -> str: ...
+def raw_input(prompt: Any = ...) -> str: ...
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ...
diff --git a/typeshed/stdlib/2/ast.pyi b/typeshed/stdlib/2/ast.pyi
index 95bda54..244b230 100644
--- a/typeshed/stdlib/2/ast.pyi
+++ b/typeshed/stdlib/2/ast.pyi
@@ -21,7 +21,7 @@ __version__ = ...  # type: str
 PyCF_ONLY_AST = ...  # type: int
 
 
-def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> Module: ...
 def copy_location(new_node: AST, old_node: AST) -> AST: ...
 def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
 def fix_missing_locations(node: AST) -> AST: ...
diff --git a/typeshed/stdlib/2/builtins.pyi b/typeshed/stdlib/2/builtins.pyi
index e8d4ae0..59b01e5 100644
--- a/typeshed/stdlib/2/builtins.pyi
+++ b/typeshed/stdlib/2/builtins.pyi
@@ -43,8 +43,9 @@ class object:
     def __format__(self, format_spec: str) -> str: ...
     def __getattribute__(self, name: str) -> Any: ...
     def __delattr__(self, name: str) -> None: ...
+    def __sizeof__(self) -> int: ...
 
-class type:
+class type(object):
     __bases__ = ...  # type: Tuple[type, ...]
     __name__ = ...  # type: str
     __module__ = ...  # type: str
@@ -67,6 +68,8 @@ class type:
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
     def __subclasses__(self: _TT) -> List[_TT]: ...
+    def __instancecheck__(self, instance: Any) -> bool: ...
+    def __subclasscheck__(self, subclass: type) -> bool: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     @overload
@@ -119,6 +122,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __int__(self) -> int: ...
     def __abs__(self) -> int: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
 
 class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     @overload
@@ -166,6 +170,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -201,6 +206,13 @@ class complex(SupportsAbs[float]):
     def __str__(self) -> str: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __nonzero__(self) -> bool: ...
+
+class super(object):
+    @overload
+    def __init__(self, t: Any, obj: Any) -> None: ...
+    @overload
+    def __init__(self, t: Any) -> None: ...
 
 class basestring(metaclass=ABCMeta): ...
 
@@ -282,7 +294,7 @@ class unicode(basestring, Sequence[unicode]):
     def __hash__(self) -> int: ...
 
 class str(basestring, Sequence[str]):
-    def __init__(self, object: object) -> None: ...
+    def __init__(self, object: object='') -> None: ...
     def capitalize(self) -> str: ...
     def center(self, width: int, fillchar: str = ...) -> str: ...
     def count(self, x: unicode) -> int: ...
@@ -453,11 +465,14 @@ class bytearray(MutableSequence[int]):
 class bool(int, SupportsInt, SupportsFloat):
     def __init__(self, o: object = ...) -> None: ...
 
-class slice:
-    start = 0
-    step = 0
-    stop = 0
-    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+class slice(object):
+    start = ...  # type: Optional[int]
+    step = ...  # type: Optional[int]
+    stop = ...  # type: Optional[int]
+    @overload
+    def __init__(self, stop: int = None) -> None: ...
+    @overload
+    def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
     def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -538,7 +553,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def has_key(self, k: _KT) -> bool: ...
     def clear(self) -> None: ...
     def copy(self) -> Dict[_KT, _VT]: ...
-    def get(self, k: _KT, default: _VT = None) -> _VT: ...
     def pop(self, k: _KT, default: _VT = ...) -> _VT: ...
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = ...) -> _VT: ...
@@ -546,9 +560,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def update(self, m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
     def update(self, m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
-    def keys(self) -> List[_KT]: ...
-    def values(self) -> List[_VT]: ...
-    def items(self) -> List[Tuple[_KT, _VT]]: ...
     def iterkeys(self) -> Iterator[_KT]: ...
     def itervalues(self) -> Iterator[_VT]: ...
     def iteritems(self) -> Iterator[Tuple[_KT, _VT]]: ...
@@ -565,7 +576,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __getitem__(self, k: _KT) -> _VT: ...
     def __setitem__(self, k: _KT, v: _VT) -> None: ...
     def __delitem__(self, v: _KT) -> None: ...
-    def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_KT]: ...
     def __str__(self) -> str: ...
 
@@ -696,7 +706,7 @@ def hasattr(o: Any, name: unicode) -> bool: ...
 def hash(o: object) -> int: ...
 def hex(i: int) -> str: ...  # TODO __index__
 def id(o: object) -> int: ...
-def input(prompt: unicode = ...) -> Any: ...
+def input(prompt: Any = ...) -> Any: ...
 def intern(string: str) -> str: ...
 @overload
 def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
@@ -722,7 +732,7 @@ def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ...) -> _T: ...
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
-def next(i: Iterator[_T], default: _T) -> _T: ...
+def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
 def oct(i: int) -> str: ...  # TODO __index__
 @overload
 def open(file: str, mode: str = 'r', buffering: int = ...) -> BinaryIO: ...
@@ -744,7 +754,7 @@ def pow(x: float, y: float) -> float: ...
 def pow(x: float, y: float, z: float) -> float: ...
 def quit(code: int = ...) -> None: ...
 def range(x: int, y: int = 0, step: int = 1) -> List[int]: ...
-def raw_input(prompt: unicode = ...) -> str: ...
+def raw_input(prompt: Any = ...) -> str: ...
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initializer: _T) -> _T: ...
diff --git a/typeshed/stdlib/2/codecs.pyi b/typeshed/stdlib/2/codecs.pyi
index 0b2b49a..4579378 100644
--- a/typeshed/stdlib/2/codecs.pyi
+++ b/typeshed/stdlib/2/codecs.pyi
@@ -30,7 +30,7 @@ _decode_type = Callable[[_encoded], _decoded]  # signature of Codec().decode
 _stream_reader_type = Callable[[BinaryIO], 'StreamReader']  # signature of StreamReader __init__
 _stream_writer_type = Callable[[BinaryIO], 'StreamWriter']  # signature of StreamWriter __init__
 _incremental_encoder_type = Callable[[], 'IncrementalEncoder']  # signature of IncrementalEncoder __init__
-_incremental_decode_type = Callable[[], 'IncrementalDecoder']  # signature of IncrementalDecoder __init__
+_incremental_decoder_type = Callable[[], 'IncrementalDecoder']  # signature of IncrementalDecoder __init__
 
 
 def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded:
@@ -41,22 +41,22 @@ def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
 def lookup(encoding: str) -> 'CodecInfo':
     ...
 class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
-    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decode_type = ..., name: str = ...) -> None: ...
+    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None: ...
     encode = ...  # type: _encode_type
     decode = ...  # type: _decode_type
     streamreader = ...  # type: _stream_reader_type
     streamwriter = ...  # type: _stream_writer_type
     incrementalencoder = ...  # type: _incremental_encoder_type
-    incrementaldecoder = ...  # type: _incremental_decode_type
+    incrementaldecoder = ...  # type: _incremental_decoder_type
     name = ...  # type: str
 
 def getencoder(encoding: str) -> _encode_type:
     ...
-def getdecoder(encoding: str) -> _encode_type:
+def getdecoder(encoding: str) -> _decode_type:
     ...
 def getincrementalencoder(encoding: str) -> _incremental_encoder_type:
     ...
-def getincrementaldecoder(encoding: str) -> _incremental_encoder_type:
+def getincrementaldecoder(encoding: str) -> _incremental_decoder_type:
     ...
 def getreader(encoding: str) -> _stream_reader_type:
     ...
diff --git a/typeshed/stdlib/2/collections.pyi b/typeshed/stdlib/2/collections.pyi
index 74fc2a8..c2f0af2 100644
--- a/typeshed/stdlib/2/collections.pyi
+++ b/typeshed/stdlib/2/collections.pyi
@@ -18,7 +18,7 @@ _KT = TypeVar('_KT')
 _VT = TypeVar('_VT')
 
 # namedtuple is special-cased in the type checker; the initializer is ignored.
-def namedtuple(typename: str, field_names: Union[str, Iterable[Any]], *,
+def namedtuple(typename: Union[str, unicode], field_names: Union[str, unicode, Iterable[Any]], *,
                verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ...
 
 class deque(Sized, Iterable[_T], Reversible[_T], Generic[_T]):
@@ -92,17 +92,3 @@ class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
     def __init__(self, default_factory: Callable[[], _VT],
                  iterable: Iterable[Tuple[_KT, _VT]]) -> None: ...
     def __missing__(self, key: _KT) -> _VT: ...
-
-class ChainMap(Dict[_KT, _VT], Generic[_KT, _VT]):
-    @overload
-    def __init__(self) -> None: ...
-    @overload
-    def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ...
-
-    @property
-    def maps(self) -> List[Mapping[_KT, _VT]]: ...
-
-    def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
-
-    @property
-    def parents(self) -> ChainMap[_KT, _VT]: ...
diff --git a/typeshed/stdlib/2/datetime.pyi b/typeshed/stdlib/2/datetime.pyi
index e7f4b44..6174b9a 100644
--- a/typeshed/stdlib/2/datetime.pyi
+++ b/typeshed/stdlib/2/datetime.pyi
@@ -38,7 +38,7 @@ class date(object):
     def day(self) -> int: ...
 
     def ctime(self) -> str: ...
-    def strftime(self, fmt: str) -> str: ...
+    def strftime(self, fmt: Union[str, unicode]) -> str: ...
     def __format__(self, fmt: Union[str, unicode]) -> str: ...
     def isoformat(self) -> str: ...
     def timetuple(self) -> struct_time: ...
@@ -83,7 +83,7 @@ class time:
     def __gt__(self, other: time) -> bool: ...
     def __hash__(self) -> int: ...
     def isoformat(self) -> str: ...
-    def strftime(self, fmt: str) -> str: ...
+    def strftime(self, fmt: Union[str, unicode]) -> str: ...
     def __format__(self, fmt: str) -> str: ...
     def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
@@ -177,7 +177,7 @@ class datetime(object):
     def utcnow(cls) -> datetime: ...
     @classmethod
     def combine(cls, date: date, time: time) -> datetime: ...
-    def strftime(self, fmt: str) -> str: ...
+    def strftime(self, fmt: Union[str, unicode]) -> str: ...
     def __format__(self, fmt: str) -> str: ...
     def toordinal(self) -> int: ...
     def timetuple(self) -> struct_time: ...
@@ -193,7 +193,7 @@ class datetime(object):
     def ctime(self) -> str: ...
     def isoformat(self, sep: str = ...) -> str: ...
     @classmethod
-    def strptime(cls, date_string: str, format: str) -> datetime: ...
+    def strptime(cls, date_string: Union[str, unicode], format: Union[str, unicode]) -> datetime: ...
     def utcoffset(self) -> Optional[timedelta]: ...
     def tzname(self) -> Optional[str]: ...
     def dst(self) -> Optional[int]: ...
diff --git a/typeshed/stdlib/2/decimal.pyi b/typeshed/stdlib/2/decimal.pyi
index ce4d586..7d51c8f 100644
--- a/typeshed/stdlib/2/decimal.pyi
+++ b/typeshed/stdlib/2/decimal.pyi
@@ -54,7 +54,7 @@ def getcontext() -> Context: ...
 def localcontext(ctx: Optional[Context] = None) -> _ContextManager: ...
 
 class Decimal(SupportsAbs[Decimal], SupportsFloat, SupportsInt):
-    def __init__(cls, value: Union[_Decimal, float, str,
+    def __init__(cls, value: Union[_Decimal, float, str, unicode,
                                    Tuple[int, Sequence[int], int]] = ...,
                  context: Context = ...) -> None: ...
     @classmethod
diff --git a/typeshed/stdlib/2/sqlite3/dbapi2.pyi b/typeshed/stdlib/2/sqlite3/dbapi2.pyi
index 7a6dcf1..bd44433 100644
--- a/typeshed/stdlib/2/sqlite3/dbapi2.pyi
+++ b/typeshed/stdlib/2/sqlite3/dbapi2.pyi
@@ -79,12 +79,12 @@ def register_adapter(type: type, callable: Any) -> None: ...
 # TODO: sqlite3.register_converter.__doc__ specifies callable as unknown
 def register_converter(typename: str, callable: bytes) -> None: ...
 
-class Cache:
+class Cache(object):
     def __init__(self, *args, **kwargs) -> None: ...
     def display(self, *args, **kwargs) -> None: ...
     def get(self, *args, **kwargs) -> None: ...
 
-class Connection:
+class Connection(object):
     DataError = ...  # type: Any
     DatabaseError = ...  # type: Any
     Error = ...  # type: Any
@@ -165,7 +165,7 @@ class NotSupportedError(DatabaseError): ...
 
 class OperationalError(DatabaseError): ...
 
-class OptimizedUnicode:
+class OptimizedUnicode(object):
     maketrans = ...  # type: Any
     def __init__(self, *args, **kwargs): ...
     def capitalize(self, *args, **kwargs): ...
@@ -229,14 +229,13 @@ class OptimizedUnicode:
     def __ne__(self, other): ...
     def __rmod__(self, other): ...
     def __rmul__(self, other): ...
-    def __sizeof__(self): ...
 
-class PrepareProtocol:
+class PrepareProtocol(object):
     def __init__(self, *args, **kwargs): ...
 
 class ProgrammingError(DatabaseError): ...
 
-class Row:
+class Row(object):
     def __init__(self, *args, **kwargs): ...
     def keys(self, *args, **kwargs): ...
     def __eq__(self, other): ...
@@ -250,7 +249,7 @@ class Row:
     def __lt__(self, other): ...
     def __ne__(self, other): ...
 
-class Statement:
+class Statement(object):
     def __init__(self, *args, **kwargs): ...
 
 class Warning(Exception): ...
diff --git a/typeshed/stdlib/2/types.pyi b/typeshed/stdlib/2/types.pyi
index 2f3b935..15c8dd9 100644
--- a/typeshed/stdlib/2/types.pyi
+++ b/typeshed/stdlib/2/types.pyi
@@ -84,7 +84,12 @@ class UnboundMethodType:
     __func__ = im_func
     __self__ = im_self
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
-class InstanceType: ...
+
+class InstanceType:
+    __doc__ = ...  # type: Optional[str]
+    __class__ = ...  # type: type
+    __module__ = ...  # type: Any
+
 MethodType = UnboundMethodType
 
 class BuiltinFunctionType:
diff --git a/typeshed/stdlib/2/typing.pyi b/typeshed/stdlib/2/typing.pyi
index 035f36e..df17a70 100644
--- a/typeshed/stdlib/2/typing.pyi
+++ b/typeshed/stdlib/2/typing.pyi
@@ -13,6 +13,7 @@ Tuple = object()
 Callable = object()
 Type = object()
 _promote = object()
+ClassVar = object()
 
 class GenericMeta(type): ...
 
@@ -174,15 +175,17 @@ class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]):
     def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_VT_co]: ...
 
-class Mapping(Sized, Iterable[_KT], Container[_KT], Generic[_KT, _VT_co]):
+class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT_co]):
     # TODO: We wish the key type could also be covariant, but that doesn't work,
     # see discussion in https: //github.com/python/typing/pull/273.
     @abstractmethod
     def __getitem__(self, k: _KT) -> _VT_co:
         ...
     # Mixin methods
-    def get(self, k: _KT, default: _VT_co = ...) -> _VT_co:  # type: ignore
-        ...
+    @overload  # type: ignore
+    def get(self, k: _KT) -> Optional[_VT_co]: ...
+    @overload  # type: ignore
+    def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ...
     def keys(self) -> list[_KT]: ...
     def values(self) -> list[_VT_co]: ...
     def items(self) -> list[Tuple[_KT, _VT_co]]: ...
@@ -353,8 +356,17 @@ def cast(tp: Type[_T], obj: Any) -> _T: ...
 
 # Type constructors
 
-# NamedTuple is special-cased in the type checker; the initializer is ignored.
-def NamedTuple(typename: str, fields: Iterable[Tuple[str, Any]], *,
-               verbose: bool = ..., rename: bool = ...) -> Type[tuple]: ...
+# NamedTuple is special-cased in the type checker
+class NamedTuple(tuple):
+    _fields = ...  # type: Tuple[str, ...]
+
+    def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]], *,
+                 verbose: bool = ..., rename: bool = ...) -> None: ...
+
+    @classmethod
+    def _make(cls, iterable: Iterable[Any]) -> NamedTuple: ...
+
+    def _asdict(self) -> dict: ...
+    def _replace(self, **kwargs: Any) -> NamedTuple: ...
 
 def NewType(name: str, tp: Type[_T]) -> Type[_T]: ...
diff --git a/typeshed/stdlib/2/unittest.pyi b/typeshed/stdlib/2/unittest.pyi
index ce06d35..163ccce 100644
--- a/typeshed/stdlib/2/unittest.pyi
+++ b/typeshed/stdlib/2/unittest.pyi
@@ -6,7 +6,7 @@
 
 from typing import (
     Any, Callable, Dict, Iterable, Tuple, List, TextIO, Sequence,
-    overload, Set, TypeVar, Pattern
+    overload, Set, TypeVar, Union, Pattern
 )
 from abc import abstractmethod, ABCMeta
 
@@ -123,9 +123,9 @@ class TestCase(Testable):
                  msg: object = ...) -> None: ...
     def assertNotIn(self, first: _T, second: Iterable[_T],
                     msg: object = ...) -> None: ...
-    def assertIsInstance(self, obj: Any, cls: type,
+    def assertIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]],
                          msg: object = ...) -> None: ...
-    def assertNotIsInstance(self, obj: Any, cls: type,
+    def assertNotIsInstance(self, obj: Any, cls: Union[type, Tuple[type, ...]],
                             msg: object = ...) -> None: ...
     def fail(self, msg: object = ...) -> None: ...
     def countTestCases(self) -> int: ...
@@ -163,10 +163,10 @@ class SkipTest(Exception):
     ...
 
 # TODO precise types
-def skipUnless(condition: Any, reason: str) -> Any: ...
-def skipIf(condition: Any, reason: str) -> Any: ...
+def skipUnless(condition: Any, reason: Union[str, unicode]) -> Any: ...
+def skipIf(condition: Any, reason: Union[str, unicode]) -> Any: ...
 def expectedFailure(func: _FT) -> _FT: ...
-def skip(reason: str) -> Any: ...
+def skip(reason: Union[str, unicode]) -> Any: ...
 
 def main(module: str = ..., defaultTest: str = ...,
          argv: List[str] = ..., testRunner: Any = ...,
diff --git a/typeshed/stdlib/2/wsgiref/types.pyi b/typeshed/stdlib/2/wsgiref/types.pyi
new file mode 100644
index 0000000..b7bd533
--- /dev/null
+++ b/typeshed/stdlib/2/wsgiref/types.pyi
@@ -0,0 +1,34 @@
+# Type declaration for a WSGI Function in Python 2
+#
+# wsgiref/types.py doesn't exist and neither does WSGIApplication; it's a type
+# provided for type checking purposes.
+#
+# This means you cannot simply import wsgiref.types in your code. Instead,
+# use the `TYPE_CHECKING` flag from the typing module:
+#
+#   from typing import TYPE_CHECKING
+#
+#   if TYPE_CHECKING:
+#       from wsgiref.types import WSGIApplication
+#
+# This import is now only taken into account by the type checker. Consequently,
+# you need to use 'WSGIApplication' and not simply WSGIApplication when type
+# hinting your code.  Otherwise Python will raise NameErrors.
+
+from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
+from types import TracebackType
+
+_exc_info = Tuple[Optional[Type[BaseException]],
+                  Optional[BaseException],
+                  Optional[TracebackType]]
+_Text = Union[unicode, str]
+WSGIApplication = Callable[
+    [
+        Dict[_Text, _Text],
+        Union[
+            Callable[[_Text, List[Tuple[_Text, _Text]]], Callable[[_Text], None]],
+            Callable[[_Text, List[Tuple[_Text, _Text]], _exc_info], Callable[[_Text], None]]
+        ]
+    ],
+    Iterable[_Text]
+]
diff --git a/typeshed/stdlib/2/xml/etree/ElementTree.pyi b/typeshed/stdlib/2/xml/etree/ElementTree.pyi
deleted file mode 100644
index 89e61f6..0000000
--- a/typeshed/stdlib/2/xml/etree/ElementTree.pyi
+++ /dev/null
@@ -1,116 +0,0 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, Generator
-import io
-
-VERSION = ...  # type: str
-
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class _SimpleElementPath:
-    def find(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Optional['Element']: ...
-    def findtext(self, element: 'Element', tag: _str_or_bytes, default: _T=..., namespaces: Any=...) -> Union[str, bytes, _T]: ...
-    def iterfind(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Generator['Element', None, None]: ...
-    def findall(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> List['Element']: ...
-
-class ParseError(SyntaxError): ...
-
-def iselement(element: 'Element') -> bool: ...
-
-class Element(Sequence['Element']):
-    tag = ...  # type: _str_or_bytes
-    attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
-    text = ...  # type: Optional[_str_or_bytes]
-    tail = ...  # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
-    def append(self, element: 'Element') -> None: ...
-    def clear(self) -> None: ...
-    def copy(self) -> 'Element': ...
-    def extend(self, elements: Sequence['Element']) -> None: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
-    def getchildren(self) -> List['Element']: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
-    def insert(self, index: int, element: 'Element') -> None: ...
-    def items(self) -> List[Tuple[AnyStr, AnyStr]]: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def itertext(self) -> Generator[str, None, None]: ...
-    def keys(self) -> List[AnyStr]: ...
-    def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
-    def remove(self, element: 'Element') -> None: ...
-    def set(self, key: AnyStr, value: AnyStr) -> None: ...
-    def __bool__(self) -> bool: ...
-    def __delitem__(self, index: int) -> None: ...
-    def __getitem__(self, index) -> 'Element': ...
-    def __len__(self) -> int: ...
-    def __setitem__(self, index: int, element: 'Element') -> None: ...
-
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
-def Comment(text: _str_or_bytes=...) -> Element: ...
-def ProcessingInstruction(target: str, text: str=...) -> Element: ...
-
-PI = ...  # type: Callable[..., Element]
-
-class QName:
-    text = ...  # type: str
-    def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
-
-
-_file_or_filename = Union[str, bytes, int, IO[Any]]
-
-class ElementTree:
-    def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
-    def getroot(self) -> Element: ...
-    def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
-    def write_c14n(self, file: _file_or_filename) -> None: ...
-
-def register_namespace(prefix: str, uri: str) -> None: ...
-def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
-def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
-def dump(elem: Element) -> None: ...
-def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
-def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
-
-class _IterParseIterator:
-    root = ...  # type: Any
-    def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
-    def next(self) -> Tuple[str, Element]: ...
-    def __iter__(self) -> _IterParseIterator: ...
-
-def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
-def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
-# TODO-improve this type
-fromstring = ...  # type: Callable[..., Element]
-def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
-
-class TreeBuilder:
-    def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
-    def close(self) -> Element: ...
-    def data(self, data: AnyStr) -> None: ...
-    def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
-    def end(self, tag: AnyStr) -> Element: ...
-
-class XMLParser:
-    parser = ...  # type: Any
-    target = ...  # type: TreeBuilder
-    # TODO-what is entity used for???
-    entity = ...  # type: Any
-    version = ...  # type: str
-    def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
-    def doctype(self, name: str, pubid: str, system: str) -> None: ...
-    def close(self) -> Any: ...  # TODO-most of the time, this will be Element, but it can be anything target.close() returns
-    def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/2and3/argparse.pyi b/typeshed/stdlib/2and3/argparse.pyi
index 627426d..c481d9e 100644
--- a/typeshed/stdlib/2and3/argparse.pyi
+++ b/typeshed/stdlib/2and3/argparse.pyi
@@ -8,6 +8,10 @@ import sys
 
 _T = TypeVar('_T')
 
+if sys.version_info >= (3,):
+    _Text = str
+else:
+    _Text = Union[str, unicode]
 
 ONE_OR_MORE = ...  # type: str
 OPTIONAL = ...  # type: str
@@ -27,67 +31,67 @@ class ArgumentParser:
                      epilog: Optional[str] = ...,
                      parents: Sequence[ArgumentParser] = ...,
                      formatter_class: Type[HelpFormatter] = ...,
-                     prefix_chars: str = ...,
+                     prefix_chars: _Text = ...,
                      fromfile_prefix_chars: Optional[str] = ...,
                      argument_default: Optional[str] = ...,
-                     conflict_handler: str = ...,
+                     conflict_handler: _Text = ...,
                      add_help: bool = ...,
                      allow_abbrev: bool = ...) -> None: ...
     else:
         def __init__(self,
-                     prog: Optional[str] = ...,
-                     usage: Optional[str] = ...,
-                     description: Optional[str] = ...,
-                     epilog: Optional[str] = ...,
+                     prog: Optional[_Text] = ...,
+                     usage: Optional[_Text] = ...,
+                     description: Optional[_Text] = ...,
+                     epilog: Optional[_Text] = ...,
                      parents: Sequence[ArgumentParser] = ...,
                      formatter_class: Type[HelpFormatter] = ...,
-                     prefix_chars: str = ...,
-                     fromfile_prefix_chars: Optional[str] = ...,
-                     argument_default: Optional[str] = ...,
-                     conflict_handler: str = ...,
+                     prefix_chars: _Text = ...,
+                     fromfile_prefix_chars: Optional[_Text] = ...,
+                     argument_default: Optional[_Text] = ...,
+                     conflict_handler: _Text = ...,
                      add_help: bool = ...) -> None: ...
     def add_argument(self,
-                     *name_or_flags: Union[str, Sequence[str]],
-                     action: Union[str, Type[Action]] = ...,
-                     nargs: Union[int, str] = ...,
+                     *name_or_flags: Union[_Text, Sequence[_Text]],
+                     action: Union[_Text, Type[Action]] = ...,
+                     nargs: Union[int, _Text] = ...,
                      const: Any = ...,
                      default: Any = ...,
                      type: Union[Callable[[str], _T], FileType] = ...,
                      choices: Iterable[_T] = ...,
                      required: bool = ...,
-                     help: str = ...,
-                     metavar: Union[str, Tuple[str, ...]] = ...,
-                     dest: str = ...,
-                     version: str = ...) -> None: ...  # weirdly documented
-    def parse_args(self, args: Optional[Sequence[str]] = ...,
+                     help: _Text = ...,
+                     metavar: Union[_Text, Tuple[_Text, ...]] = ...,
+                     dest: _Text = ...,
+                     version: _Text = ...) -> None: ...  # weirdly documented
+    def parse_args(self, args: Optional[Sequence[_Text]] = ...,
                    namespace: Optional[Namespace] = ...) -> Namespace: ...
-    def add_subparsers(self, title: str = ...,
-                       description: Optional[str] = ...,
-                       prog: str = ...,
+    def add_subparsers(self, title: _Text = ...,
+                       description: Optional[_Text] = ...,
+                       prog: _Text = ...,
                        parser_class: Type[ArgumentParser] = ...,
                        action: Type[Action] = ...,
-                       option_string: str = ...,
-                       dest: Optional[str] = ...,
-                       help: Optional[str] = ...,
-                       metavar: Optional[str] = ...) -> _SubParsersAction: ...
-    def add_argument_group(self, title: Optional[str] = ...,
-                           description: Optional[str] = ...) -> _ArgumentGroup: ...
+                       option_string: _Text = ...,
+                       dest: Optional[_Text] = ...,
+                       help: Optional[_Text] = ...,
+                       metavar: Optional[_Text] = ...) -> _SubParsersAction: ...
+    def add_argument_group(self, title: Optional[_Text] = ...,
+                           description: Optional[_Text] = ...) -> _ArgumentGroup: ...
     def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ...
     def set_defaults(self, **kwargs: Any) -> None: ...
-    def get_default(self, dest: str) -> Any: ...
+    def get_default(self, dest: _Text) -> Any: ...
     def print_usage(self, file: Optional[IO[str]] = ...) -> None: ...
     def print_help(self, file: Optional[IO[str]] = ...) -> None: ...
     def format_usage(self) -> str: ...
     def format_help(self) -> str: ...
-    def parse_known_args(self, args: Optional[Sequence[str]] = ...,
+    def parse_known_args(self, args: Optional[Sequence[_Text]] = ...,
                          namespace: Optional[Namespace] = ...) -> Tuple[Namespace, List[str]]: ...
-    def convert_arg_line_to_args(self, arg_line: str) -> List[str]: ...
-    def exit(self, status: int = ..., message: Optional[str] = ...) -> None: ...
-    def error(self, message: str) -> None: ...
+    def convert_arg_line_to_args(self, arg_line: _Text) -> List[str]: ...
+    def exit(self, status: int = ..., message: Optional[_Text] = ...) -> None: ...
+    def error(self, message: _Text) -> None: ...
 
 class HelpFormatter:
     # not documented
-    def __init__(self, prog: str, indent_increment: int = ...,
+    def __init__(self, prog: _Text, indent_increment: int = ...,
                  max_help_position: int = ...,
                  width: Optional[int] = ...) -> None: ...
 class RawDescriptionHelpFormatter(HelpFormatter): ...
@@ -98,58 +102,58 @@ if sys.version_info >= (3,):
 
 class Action:
     def __init__(self,
-                 option_strings: Sequence[str],
-                 dest: str = ...,
-                 nargs: Optional[Union[int, str]] = ...,
+                 option_strings: Sequence[_Text],
+                 dest: _Text = ...,
+                 nargs: Optional[Union[int, _Text]] = ...,
                  const: Any = ...,
                  default: Any = ...,
                  type: Union[Callable[[str], _T], FileType, None] = ...,
                  choices: Optional[Iterable[_T]] = ...,
                  required: bool = ...,
-                 help: Optional[str] = ...,
-                 metavar: Union[str, Tuple[str, ...]] = ...) -> None: ...
+                 help: Optional[_Text] = ...,
+                 metavar: Union[_Text, Tuple[_Text, ...]] = ...) -> None: ...
     def __call__(self, parser: ArgumentParser, namespace: Namespace,
-                 values: Union[str, Sequence[Any], None],
-                 option_string: str = ...) -> None: ...
+                 values: Union[_Text, Sequence[Any], None],
+                 option_string: _Text = ...) -> None: ...
 
 class Namespace:
-    def __getattr__(self, name: str) -> Any: ...
-    def __setattr__(self, name: str, value: Any) -> None: ...
+    def __getattr__(self, name: _Text) -> Any: ...
+    def __setattr__(self, name: _Text, value: Any) -> None: ...
 
 class FileType:
     if sys.version_info >= (3, 4):
-        def __init__(self, mode: str = ..., bufsize: int = ...,
-                     encoding: Optional[str] = ...,
-                     errors: Optional[str] = ...) -> None: ...
+        def __init__(self, mode: _Text = ..., bufsize: int = ...,
+                     encoding: Optional[_Text] = ...,
+                     errors: Optional[_Text] = ...) -> None: ...
     elif sys.version_info >= (3,):
         def __init__(self,
-                     mode: str = ..., bufsize: int = ...) -> None: ...
+                     mode: _Text = ..., bufsize: int = ...) -> None: ...
     else:
         def __init__(self,
-                     mode: str = ..., bufsize: Optional[int] = ...) -> None: ...
-    def __call__(self, string: str) -> IO[Any]: ...
+                     mode: _Text = ..., bufsize: Optional[int] = ...) -> None: ...
+    def __call__(self, string: _Text) -> IO[Any]: ...
 
 class _ArgumentGroup:
     def add_argument(self,
-                     *name_or_flags: Union[str, Sequence[str]],
-                     action: Union[str, Type[Action]] = ...,
-                     nargs: Union[int, str] = ...,
+                     *name_or_flags: Union[_Text, Sequence[_Text]],
+                     action: Union[_Text, Type[Action]] = ...,
+                     nargs: Union[int, _Text] = ...,
                      const: Any = ...,
                      default: Any = ...,
                      type: Union[Callable[[str], _T], FileType] = ...,
                      choices: Iterable[_T] = ...,
                      required: bool = ...,
-                     help: str = ...,
-                     metavar: Union[str, Tuple[str, ...]] = ...,
-                     dest: str = ...,
-                     version: str = ...) -> None: ...
+                     help: _Text = ...,
+                     metavar: Union[_Text, Tuple[_Text, ...]] = ...,
+                     dest: _Text = ...,
+                     version: _Text = ...) -> None: ...
     def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ...
 
 class _MutuallyExclusiveGroup(_ArgumentGroup): ...
 
 class _SubParsersAction:
     # TODO: Type keyword args properly.
-    def add_parser(self, name: str, **kwargs: Any) -> ArgumentParser: ...
+    def add_parser(self, name: _Text, **kwargs: Any) -> ArgumentParser: ...
 
 # not documented
 class ArgumentTypeError(Exception): ...
diff --git a/typeshed/stdlib/2and3/logging/__init__.pyi b/typeshed/stdlib/2and3/logging/__init__.pyi
index eb7fcee..f7751db 100644
--- a/typeshed/stdlib/2and3/logging/__init__.pyi
+++ b/typeshed/stdlib/2and3/logging/__init__.pyi
@@ -275,7 +275,7 @@ else:
     @overload
     def getLogger() -> Logger: ...
     @overload
-    def getLogger(name: str) -> Logger: ...
+    def getLogger(name: Union[Text, str]) -> Logger: ...
 def getLoggerClass() -> type: ...
 if sys.version_info >= (3,):
     def getLogRecordFactory() -> Callable[..., LogRecord]: ...
diff --git a/typeshed/stdlib/2/syslog.pyi b/typeshed/stdlib/2and3/syslog.pyi
similarity index 92%
rename from typeshed/stdlib/2/syslog.pyi
rename to typeshed/stdlib/2and3/syslog.pyi
index 82e0b9a..15534e7 100644
--- a/typeshed/stdlib/2/syslog.pyi
+++ b/typeshed/stdlib/2and3/syslog.pyi
@@ -1,3 +1,5 @@
+from typing import overload
+
 LOG_ALERT = ...  # type: int
 LOG_AUTH = ...  # type: int
 LOG_CONS = ...  # type: int
@@ -35,4 +37,8 @@ def LOG_UPTO(a: int) -> int: ...
 def closelog() -> None: ...
 def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
 def setlogmask(x: int) -> int: ...
+
+ at overload
 def syslog(priority: int, message: str) -> None: ...
+ at overload
+def syslog(message: str) -> None: ...
diff --git a/typeshed/stdlib/2/xml/etree/ElementInclude.pyi b/typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi
similarity index 89%
rename from typeshed/stdlib/2/xml/etree/ElementInclude.pyi
rename to typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi
index 290ee09..519506e 100644
--- a/typeshed/stdlib/2/xml/etree/ElementInclude.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/ElementInclude.pyi
@@ -1,6 +1,4 @@
 # Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
 from typing import Union, Optional, Callable
 from xml.etree.ElementTree import Element
diff --git a/typeshed/stdlib/2/xml/etree/ElementPath.pyi b/typeshed/stdlib/2and3/xml/etree/ElementPath.pyi
similarity index 95%
rename from typeshed/stdlib/2/xml/etree/ElementPath.pyi
rename to typeshed/stdlib/2and3/xml/etree/ElementPath.pyi
index d0095e3..91e85cd 100644
--- a/typeshed/stdlib/2/xml/etree/ElementPath.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/ElementPath.pyi
@@ -1,6 +1,4 @@
 # Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
 from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
 from xml.etree.ElementTree import Element
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
similarity index 78%
rename from typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
rename to typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
index 0ae1e3f..fe93987 100644
--- a/typeshed/stdlib/3.2/xml/etree/ElementTree.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/ElementTree.pyi
@@ -1,26 +1,19 @@
 # Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
 from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
 import io
+import sys
 
 VERSION = ...  # type: str
 
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class _SimpleElementPath:
-    def find(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Optional['Element']: ...
-    def findtext(self, element: 'Element', tag: _str_or_bytes, default: _T=..., namespaces: Any=...) -> Union[str, bytes, _T]: ...
-    def iterfind(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> Generator['Element', None, None]: ...
-    def findall(self, element: 'Element', tag: _str_or_bytes, namespaces: Any=...) -> List['Element']: ...
-
 class ParseError(SyntaxError): ...
 
 def iselement(element: 'Element') -> bool: ...
 
+_Ss = TypeVar('_Ss', str, bytes)
+_T = TypeVar('_T')
+_str_or_bytes = Union[str, bytes]
+
 class Element(Sequence['Element']):
     tag = ...  # type: _str_or_bytes
     attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
@@ -37,7 +30,10 @@ class Element(Sequence['Element']):
     def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
     def getchildren(self) -> List['Element']: ...
     def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
-    def insert(self, index: int, subelement: 'Element') -> None: ...
+    if sys.version_info >= (3, 2):
+        def insert(self, index: int, subelement: 'Element') -> None: ...
+    else:
+        def insert(self, index: int, element: 'Element') -> None: ...
     def items(self) -> ItemsView[AnyStr, AnyStr]: ...
     def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
     def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
@@ -75,23 +71,29 @@ class ElementTree:
     def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
     def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
     def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
+    if sys.version_info >= (3, 4):
+        def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
+    else:
+        def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
     def write_c14n(self, file: _file_or_filename) -> None: ...
 
 def register_namespace(prefix: str, uri: str) -> None: ...
-def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
-
-def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
+if sys.version_info >= (3, 4):
+    def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
+    def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
+else:
+    def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
+    def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
 def dump(elem: Element) -> None: ...
 def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
 def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
 
-
-class _IterParseIterator:
-    root = ...  # type: Any
-    def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
-    def __next__(self) -> Tuple[str, Element]: ...
-    def __iter__(self) -> _IterParseIterator: ...
+if sys.version_info >= (3, 4):
+    class XMLPullParser:
+        def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ...
+        def feed(self, data: bytes) -> None: ...
+        def close(self) -> None: ...
+        def read_events(self) -> Iterator[Tuple[str, Element]]: ...
 
 def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
 def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
diff --git a/typeshed/stdlib/3.4/xml/__init__.pyi b/typeshed/stdlib/2and3/xml/etree/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3.4/xml/__init__.pyi
rename to typeshed/stdlib/2and3/xml/etree/__init__.pyi
diff --git a/typeshed/stdlib/2/xml/etree/cElementTree.pyi b/typeshed/stdlib/2and3/xml/etree/cElementTree.pyi
similarity index 55%
rename from typeshed/stdlib/2/xml/etree/cElementTree.pyi
rename to typeshed/stdlib/2and3/xml/etree/cElementTree.pyi
index 8f689de..c384968 100644
--- a/typeshed/stdlib/2/xml/etree/cElementTree.pyi
+++ b/typeshed/stdlib/2and3/xml/etree/cElementTree.pyi
@@ -1,5 +1,3 @@
 # Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
 
 from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi
deleted file mode 100644
index bbe3fb3..0000000
--- a/typeshed/stdlib/3.2/xml/etree/ElementInclude.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Union, Optional, Callable
-from .ElementTree import Element
-
-XINCLUDE = ...  # type: str
-XINCLUDE_INCLUDE = ...  # type: str
-XINCLUDE_FALLBACK = ...  # type: str
-
-class FatalIncludeError(SyntaxError): ...
-
-def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
-
-# TODO: loader is of type default_loader ie it takes a callable that has the
-# same signature as default_loader. But default_loader has a keyword argument
-# Which can't be represented using Callable...
-def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi
deleted file mode 100644
index b1ea50e..0000000
--- a/typeshed/stdlib/3.2/xml/etree/ElementPath.pyi
+++ /dev/null
@@ -1,35 +0,0 @@
-# Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
-from .ElementTree import Element
-
-xpath_tokenizer_re = ...  # type: Pattern
-
-_token = Tuple[str, str]
-_next = Callable[[], _token]
-_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
-
-def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
-def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
-def prepare_child(next: _next, token: _token) -> _callback: ...
-def prepare_star(next: _next, token: _token) -> _callback: ...
-def prepare_self(next: _next, token: _token) -> _callback: ...
-def prepare_descendant(next: _next, token: _token) -> _callback: ...
-def prepare_parent(next: _next, token: _token) -> _callback: ...
-def prepare_predicate(next: _next, token: _token) -> _callback: ...
-
-ops = ...  # type: Dict[str, Callable[[_next, _token], _callback]]
-
-class _SelectorContext:
-    parent_map = ...  # type: Dict[Element, Element]
-    root = ...  # type: Element
-    def __init__(self, root: Element) -> None: ...
-
-_T = TypeVar('_T')
-
-def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi
deleted file mode 100644
index 8f689de..0000000
--- a/typeshed/stdlib/3.2/xml/etree/cElementTree.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi
deleted file mode 100644
index bbe3fb3..0000000
--- a/typeshed/stdlib/3.3/xml/etree/ElementInclude.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Union, Optional, Callable
-from .ElementTree import Element
-
-XINCLUDE = ...  # type: str
-XINCLUDE_INCLUDE = ...  # type: str
-XINCLUDE_FALLBACK = ...  # type: str
-
-class FatalIncludeError(SyntaxError): ...
-
-def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
-
-# TODO: loader is of type default_loader ie it takes a callable that has the
-# same signature as default_loader. But default_loader has a keyword argument
-# Which can't be represented using Callable...
-def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi
deleted file mode 100644
index b1ea50e..0000000
--- a/typeshed/stdlib/3.3/xml/etree/ElementPath.pyi
+++ /dev/null
@@ -1,35 +0,0 @@
-# Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
-from .ElementTree import Element
-
-xpath_tokenizer_re = ...  # type: Pattern
-
-_token = Tuple[str, str]
-_next = Callable[[], _token]
-_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
-
-def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
-def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
-def prepare_child(next: _next, token: _token) -> _callback: ...
-def prepare_star(next: _next, token: _token) -> _callback: ...
-def prepare_self(next: _next, token: _token) -> _callback: ...
-def prepare_descendant(next: _next, token: _token) -> _callback: ...
-def prepare_parent(next: _next, token: _token) -> _callback: ...
-def prepare_predicate(next: _next, token: _token) -> _callback: ...
-
-ops = ...  # type: Dict[str, Callable[[_next, _token], _callback]]
-
-class _SelectorContext:
-    parent_map = ...  # type: Dict[Element, Element]
-    root = ...  # type: Element
-    def __init__(self, root: Element) -> None: ...
-
-_T = TypeVar('_T')
-
-def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
deleted file mode 100644
index 148b89e..0000000
--- a/typeshed/stdlib/3.3/xml/etree/ElementTree.pyi
+++ /dev/null
@@ -1,113 +0,0 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
-import io
-
-VERSION = ...  # type: str
-
-class ParseError(SyntaxError): ...
-
-def iselement(element: 'Element') -> bool: ...
-
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class Element(Sequence['Element']):
-    tag = ...  # type: _str_or_bytes
-    attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
-    text = ...  # type: Optional[_str_or_bytes]
-    tail = ...  # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
-    def append(self, subelement: 'Element') -> None: ...
-    def clear(self) -> None: ...
-    def copy(self) -> 'Element': ...
-    def extend(self, elements: Sequence['Element']) -> None: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
-    def getchildren(self) -> List['Element']: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
-    def insert(self, index: int, subelement: 'Element') -> None: ...
-    def items(self) -> ItemsView[AnyStr, AnyStr]: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def itertext(self) -> Generator[str, None, None]: ...
-    def keys(self) -> KeysView[AnyStr]: ...
-    def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
-    def remove(self, subelement: 'Element') -> None: ...
-    def set(self, key: AnyStr, value: AnyStr) -> None: ...
-    def __bool__(self) -> bool: ...
-    def __delitem__(self, index: int) -> None: ...
-    def __getitem__(self, index) -> 'Element': ...
-    def __len__(self) -> int: ...
-    def __setitem__(self, index: int, element: 'Element') -> None: ...
-
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
-def Comment(text: _str_or_bytes=...) -> Element: ...
-def ProcessingInstruction(target: str, text: str=...) -> Element: ...
-
-PI = ...  # type: Callable[..., Element]
-
-class QName:
-    text = ...  # type: str
-    def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
-
-
-_file_or_filename = Union[str, bytes, int, IO[Any]]
-
-class ElementTree:
-    def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
-    def getroot(self) -> Element: ...
-    def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=...) -> None: ...
-    def write_c14n(self, file: _file_or_filename) -> None: ...
-
-def register_namespace(prefix: str, uri: str) -> None: ...
-def tostring(element: Element, encoding: str=..., method: str=...) -> str: ...
-def tostringlist(element: Element, encoding: str=..., method: str=...) -> List[str]: ...
-def dump(elem: Element) -> None: ...
-def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
-def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
-
-
-class _IterParseIterator:
-    root = ...  # type: Any
-    def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
-    def __next__(self) -> Tuple[str, Element]: ...
-    def __iter__(self) -> _IterParseIterator: ...
-
-def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
-def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
-
-# TODO-improve this type
-fromstring = ...  # type: Callable[..., Element]
-
-def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
-
-class TreeBuilder:
-    def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
-    def close(self) -> Element: ...
-    def data(self, data: AnyStr) -> None: ...
-    def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
-    def end(self, tag: AnyStr) -> Element: ...
-
-class XMLParser:
-    parser = ...  # type: Any
-    target = ...  # type: TreeBuilder
-    # TODO-what is entity used for???
-    entity = ...  # type: Any
-    version = ...  # type: str
-    def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
-    def doctype(self, name: str, pubid: str, system: str) -> None: ...
-    def close(self) -> Any: ...  # TODO-most of the time, this will be Element, but it can be anything target.close() returns
-    def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi
deleted file mode 100644
index 8f689de..0000000
--- a/typeshed/stdlib/3.3/xml/etree/cElementTree.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/stdlib/3.4/asyncio/__init__.pyi b/typeshed/stdlib/3.4/asyncio/__init__.pyi
index 5f83bc6..3f88dac 100644
--- a/typeshed/stdlib/3.4/asyncio/__init__.pyi
+++ b/typeshed/stdlib/3.4/asyncio/__init__.pyi
@@ -72,7 +72,6 @@ from asyncio.queues import (
     Queue as Queue,
     PriorityQueue as PriorityQueue,
     LifoQueue as LifoQueue,
-    JoinableQueue as JoinableQueue,
     QueueFull as QueueFull,
     QueueEmpty as QueueEmpty,
 )
@@ -84,6 +83,9 @@ from asyncio.locks import (
     BoundedSemaphore as BoundedSemaphore,
 )
 
+if sys.version_info < (3, 5):
+    from asyncio.queues import JoinableQueue as JoinableQueue
+
 # TODO: It should be possible to instantiate these classes, but mypy
 # currently disallows this.
 # See https://github.com/python/mypy/issues/1843
diff --git a/typeshed/stdlib/3.4/asyncio/queues.pyi b/typeshed/stdlib/3.4/asyncio/queues.pyi
index dd1ffe6..2901218 100644
--- a/typeshed/stdlib/3.4/asyncio/queues.pyi
+++ b/typeshed/stdlib/3.4/asyncio/queues.pyi
@@ -1,3 +1,4 @@
+import sys
 from asyncio.events import AbstractEventLoop
 from .coroutines import coroutine
 from .futures import Future
@@ -32,6 +33,10 @@ class Queue(Generic[T]):
     @coroutine
     def get(self) -> Future[T]: ...
     def get_nowait(self) -> T: ...
+    if sys.version_info >= (3, 4):
+        @coroutine
+        def join(self) -> None: ...
+        def task_done(self) -> None: ...
 
 
 class PriorityQueue(Queue): ...
@@ -39,8 +44,8 @@ class PriorityQueue(Queue): ...
 
 class LifoQueue(Queue): ...
 
-
-class JoinableQueue(Queue):
-    def task_done(self) -> None: ...
-    @coroutine
-    def join(self) -> None: ...
+if sys.version_info < (3, 5):
+    class JoinableQueue(Queue):
+        def task_done(self) -> None: ...
+        @coroutine
+        def join(self) -> None: ...
diff --git a/typeshed/stdlib/3.4/asyncio/streams.pyi b/typeshed/stdlib/3.4/asyncio/streams.pyi
index ed20303..82893f0 100644
--- a/typeshed/stdlib/3.4/asyncio/streams.pyi
+++ b/typeshed/stdlib/3.4/asyncio/streams.pyi
@@ -1,12 +1,12 @@
 import socket
-from typing import Any, Callable, Generator, Iterable, Tuple
+from typing import Any, Awaitable, Callable, Generator, Iterable, Optional, Tuple
 
 from . import coroutines
 from . import events
 from . import protocols
 from . import transports
 
-ClientConnectedCallback = Callable[[Tuple[StreamReader, StreamWriter]], None]
+ClientConnectedCallback = Callable[[StreamReader, StreamWriter], Optional[Awaitable[None]]]
 
 
 __all__ = ...  # type: str
@@ -83,6 +83,7 @@ class StreamWriter:
     def can_write_eof(self) -> bool: ...
     def close(self) -> None: ...
     def get_extra_info(self, name: str, default: Any = ...) -> Any: ...
+    @coroutines.coroutine
     def drain(self) -> None: ...
 
 class StreamReader:
diff --git a/typeshed/stdlib/3.4/asyncio/subprocess.pyi b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
index 2416f92..b492506 100644
--- a/typeshed/stdlib/3.4/asyncio/subprocess.pyi
+++ b/typeshed/stdlib/3.4/asyncio/subprocess.pyi
@@ -3,7 +3,7 @@ from asyncio import protocols
 from asyncio import streams
 from asyncio import transports
 from asyncio.coroutines import coroutine
-from typing import Any, AnyStr, Tuple, Union
+from typing import Any, AnyStr, Optional, Tuple, Union
 
 __all__ = ...  # type: str
 
@@ -33,7 +33,7 @@ class Process:
     def terminate(self) -> None: ...
     def kill(self) -> None: ...
     @coroutine
-    def communicate(self, input: bytes = ...) -> Tuple[bytes, bytes]: ...
+    def communicate(self, input: Optional[bytes] = ...) -> Tuple[bytes, bytes]: ...
 
 
 @coroutine
diff --git a/typeshed/stdlib/3.4/asyncio/tasks.pyi b/typeshed/stdlib/3.4/asyncio/tasks.pyi
index a33fd15..e95d758 100644
--- a/typeshed/stdlib/3.4/asyncio/tasks.pyi
+++ b/typeshed/stdlib/3.4/asyncio/tasks.pyi
@@ -8,28 +8,28 @@ from .futures import Future
 __all__ = ...  # type: str
 
 _T = TypeVar('_T')
+_FutureT = Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]]
 
 FIRST_EXCEPTION = 'FIRST_EXCEPTION'
 FIRST_COMPLETED = 'FIRST_COMPLETED'
 ALL_COMPLETED = 'ALL_COMPLETED'
 
-def as_completed(fs: Sequence[Future[_T]], *, loop: AbstractEventLoop = ...,
+def as_completed(fs: Sequence[_FutureT[_T]], *, loop: AbstractEventLoop = ...,
                  timeout=None) -> Iterator[Generator[Any, None, _T]]: ...
-def ensure_future(coro_or_future: Union[Future[_T], Generator[Any, None, _T]],
+def ensure_future(coro_or_future: _FutureT[_T],
                   *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 # TODO: gather() should use variadic type vars instead of _TAny.
 _TAny = Any
-def gather(*coros_or_futures: Union[Future[_TAny], Generator[Any, None, _TAny], Awaitable[_TAny]],
+def gather(*coros_or_futures: _FutureT[_TAny],
            loop: AbstractEventLoop = ..., return_exceptions: bool = False) -> Future[List[_TAny]]: ...
-def run_coroutine_threadsafe(coro: Union[Generator[Any, None, _T], Coroutine[Any, None, _T], Awaitable[_T]],
+def run_coroutine_threadsafe(coro: _FutureT[_T],
                              loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
-def shield(arg: Union[Future[_T], Generator[Any, None, _T]],
-           *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
+def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 def sleep(delay: float, result: _T = ..., loop: AbstractEventLoop = ...) -> Future[_T]: ...
-def wait(fs: List[Task[_T]], *, loop: AbstractEventLoop = ...,
+def wait(fs: Iterable[_FutureT[_T]], *, loop: AbstractEventLoop = ...,
     timeout: float = ...,
          return_when: str = ...) -> Future[Tuple[Set[Future[_T]], Set[Future[_T]]]]: ...
-def wait_for(fut: Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]], timeout: Optional[float],
+def wait_for(fut: _FutureT[_T], timeout: Optional[float],
              *, loop: AbstractEventLoop = ...) -> Future[_T]: ...
 
 class Task(Future[_T], Generic[_T]):
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi
deleted file mode 100644
index bbe3fb3..0000000
--- a/typeshed/stdlib/3.4/xml/etree/ElementInclude.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Union, Optional, Callable
-from .ElementTree import Element
-
-XINCLUDE = ...  # type: str
-XINCLUDE_INCLUDE = ...  # type: str
-XINCLUDE_FALLBACK = ...  # type: str
-
-class FatalIncludeError(SyntaxError): ...
-
-def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
-
-# TODO: loader is of type default_loader ie it takes a callable that has the
-# same signature as default_loader. But default_loader has a keyword argument
-# Which can't be represented using Callable...
-def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi
deleted file mode 100644
index b1ea50e..0000000
--- a/typeshed/stdlib/3.4/xml/etree/ElementPath.pyi
+++ /dev/null
@@ -1,35 +0,0 @@
-# Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
-from .ElementTree import Element
-
-xpath_tokenizer_re = ...  # type: Pattern
-
-_token = Tuple[str, str]
-_next = Callable[[], _token]
-_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
-
-def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
-def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
-def prepare_child(next: _next, token: _token) -> _callback: ...
-def prepare_star(next: _next, token: _token) -> _callback: ...
-def prepare_self(next: _next, token: _token) -> _callback: ...
-def prepare_descendant(next: _next, token: _token) -> _callback: ...
-def prepare_parent(next: _next, token: _token) -> _callback: ...
-def prepare_predicate(next: _next, token: _token) -> _callback: ...
-
-ops = ...  # type: Dict[str, Callable[[_next, _token], _callback]]
-
-class _SelectorContext:
-    parent_map = ...  # type: Dict[Element, Element]
-    root = ...  # type: Element
-    def __init__(self, root: Element) -> None: ...
-
-_T = TypeVar('_T')
-
-def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
deleted file mode 100644
index 0592be7..0000000
--- a/typeshed/stdlib/3.4/xml/etree/ElementTree.pyi
+++ /dev/null
@@ -1,118 +0,0 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
-import io
-
-VERSION = ...  # type: str
-
-class ParseError(SyntaxError): ...
-
-def iselement(element: 'Element') -> bool: ...
-
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class Element(Sequence['Element']):
-    tag = ...  # type: _str_or_bytes
-    attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
-    text = ...  # type: Optional[_str_or_bytes]
-    tail = ...  # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
-    def append(self, subelement: 'Element') -> None: ...
-    def clear(self) -> None: ...
-    def copy(self) -> 'Element': ...
-    def extend(self, elements: Sequence['Element']) -> None: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
-    def getchildren(self) -> List['Element']: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
-    def insert(self, index: int, subelement: 'Element') -> None: ...
-    def items(self) -> ItemsView[AnyStr, AnyStr]: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def itertext(self) -> Generator[str, None, None]: ...
-    def keys(self) -> KeysView[AnyStr]: ...
-    def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
-    def remove(self, subelement: 'Element') -> None: ...
-    def set(self, key: AnyStr, value: AnyStr) -> None: ...
-    def __bool__(self) -> bool: ...
-    def __delitem__(self, index: int) -> None: ...
-    def __getitem__(self, index) -> 'Element': ...
-    def __len__(self) -> int: ...
-    def __setitem__(self, index: int, element: 'Element') -> None: ...
-
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
-def Comment(text: _str_or_bytes=...) -> Element: ...
-def ProcessingInstruction(target: str, text: str=...) -> Element: ...
-
-PI = ...  # type: Callable[..., Element]
-
-class QName:
-    text = ...  # type: str
-    def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
-
-
-_file_or_filename = Union[str, bytes, int, IO[Any]]
-
-class ElementTree:
-    def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
-    def getroot(self) -> Element: ...
-    def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
-    def write_c14n(self, file: _file_or_filename) -> None: ...
-
-def register_namespace(prefix: str, uri: str) -> None: ...
-def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
-def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
-def dump(elem: Element) -> None: ...
-def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
-def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
-
-class XMLPullParser:
-    def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ...
-    def feed(self, data: bytes) -> None: ...
-    def close(self) -> None: ...
-    def read_events(self) -> Iterator[Tuple[str, Element]]: ...
-
-class _IterParseIterator:
-    root = ...  # type: Any
-    def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
-    def __next__(self) -> Tuple[str, Element]: ...
-    def __iter__(self) -> _IterParseIterator: ...
-
-def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
-def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
-
-# TODO-improve this type
-fromstring = ...  # type: Callable[..., Element]
-
-def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
-
-class TreeBuilder:
-    def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
-    def close(self) -> Element: ...
-    def data(self, data: AnyStr) -> None: ...
-    def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
-    def end(self, tag: AnyStr) -> Element: ...
-
-class XMLParser:
-    parser = ...  # type: Any
-    target = ...  # type: TreeBuilder
-    # TODO-what is entity used for???
-    entity = ...  # type: Any
-    version = ...  # type: str
-    def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
-    def doctype(self, name: str, pubid: str, system: str) -> None: ...
-    def close(self) -> Any: ...  # TODO-most of the time, this will be Element, but it can be anything target.close() returns
-    def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi
deleted file mode 100644
index 8f689de..0000000
--- a/typeshed/stdlib/3.4/xml/etree/cElementTree.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi
deleted file mode 100644
index bbe3fb3..0000000
--- a/typeshed/stdlib/3.5/xml/etree/ElementInclude.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Union, Optional, Callable
-from .ElementTree import Element
-
-XINCLUDE = ...  # type: str
-XINCLUDE_INCLUDE = ...  # type: str
-XINCLUDE_FALLBACK = ...  # type: str
-
-class FatalIncludeError(SyntaxError): ...
-
-def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, Element]: ...
-
-# TODO: loader is of type default_loader ie it takes a callable that has the
-# same signature as default_loader. But default_loader has a keyword argument
-# Which can't be represented using Callable...
-def include(elem: Element, loader: Callable[..., Union[str, Element]]=...) -> None: ...
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi b/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi
deleted file mode 100644
index b1ea50e..0000000
--- a/typeshed/stdlib/3.5/xml/etree/ElementPath.pyi
+++ /dev/null
@@ -1,35 +0,0 @@
-# Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Pattern, Dict, Generator, Tuple, List, Union, TypeVar, Callable, Optional
-from .ElementTree import Element
-
-xpath_tokenizer_re = ...  # type: Pattern
-
-_token = Tuple[str, str]
-_next = Callable[[], _token]
-_callback = Callable[['_SelectorContext', List[Element]], Generator[Element, None, None]]
-
-def xpath_tokenizer(pattern: str, namespaces: Dict[str, str]=...) -> Generator[_token, None, None]: ...
-def get_parent_map(context: '_SelectorContext') -> Dict[Element, Element]: ...
-def prepare_child(next: _next, token: _token) -> _callback: ...
-def prepare_star(next: _next, token: _token) -> _callback: ...
-def prepare_self(next: _next, token: _token) -> _callback: ...
-def prepare_descendant(next: _next, token: _token) -> _callback: ...
-def prepare_parent(next: _next, token: _token) -> _callback: ...
-def prepare_predicate(next: _next, token: _token) -> _callback: ...
-
-ops = ...  # type: Dict[str, Callable[[_next, _token], _callback]]
-
-class _SelectorContext:
-    parent_map = ...  # type: Dict[Element, Element]
-    root = ...  # type: Element
-    def __init__(self, root: Element) -> None: ...
-
-_T = TypeVar('_T')
-
-def iterfind(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def find(elem: Element, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-def findall(elem: Element, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-def findtext(elem: Element, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
diff --git a/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi b/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
deleted file mode 100644
index 0592be7..0000000
--- a/typeshed/stdlib/3.5/xml/etree/ElementTree.pyi
+++ /dev/null
@@ -1,118 +0,0 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
-import io
-
-VERSION = ...  # type: str
-
-class ParseError(SyntaxError): ...
-
-def iselement(element: 'Element') -> bool: ...
-
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class Element(Sequence['Element']):
-    tag = ...  # type: _str_or_bytes
-    attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
-    text = ...  # type: Optional[_str_or_bytes]
-    tail = ...  # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., 'Element']], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> None: ...
-    def append(self, subelement: 'Element') -> None: ...
-    def clear(self) -> None: ...
-    def copy(self) -> 'Element': ...
-    def extend(self, elements: Sequence['Element']) -> None: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional['Element']: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
-    def getchildren(self) -> List['Element']: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List['Element']: ...
-    def insert(self, index: int, subelement: 'Element') -> None: ...
-    def items(self) -> ItemsView[AnyStr, AnyStr]: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator['Element', None, None]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List['Element']: ...
-    def itertext(self) -> Generator[str, None, None]: ...
-    def keys(self) -> KeysView[AnyStr]: ...
-    def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> 'Element': ...
-    def remove(self, subelement: 'Element') -> None: ...
-    def set(self, key: AnyStr, value: AnyStr) -> None: ...
-    def __bool__(self) -> bool: ...
-    def __delitem__(self, index: int) -> None: ...
-    def __getitem__(self, index) -> 'Element': ...
-    def __len__(self) -> int: ...
-    def __setitem__(self, index: int, element: 'Element') -> None: ...
-
-def SubElement(parent: Element, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> Element: ...
-def Comment(text: _str_or_bytes=...) -> Element: ...
-def ProcessingInstruction(target: str, text: str=...) -> Element: ...
-
-PI = ...  # type: Callable[..., Element]
-
-class QName:
-    text = ...  # type: str
-    def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
-
-
-_file_or_filename = Union[str, bytes, int, IO[Any]]
-
-class ElementTree:
-    def __init__(self, element: Element=..., file: _file_or_filename=...) -> None: ...
-    def getroot(self) -> Element: ...
-    def parse(self, source: _file_or_filename, parser: 'XMLParser'=...) -> Element: ...
-    def iter(self, tag: Union[str, AnyStr]=...) -> Generator[Element, None, None]: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List[Element]: ...
-    def find(self, path: str, namespaces: Dict[str, str]=...) -> Optional[Element]: ...
-    def findtext(self, path: str, default: _T=..., namespaces: Dict[str, str]=...) -> Union[_T, str]: ...
-    def findall(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def iterfind(self, path: str, namespaces: Dict[str, str]=...) -> List[Element]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=..., xml_declaration: Optional[bool]=..., default_namespace: str=..., method: str=..., *, short_empty_elements: bool=...) -> None: ...
-    def write_c14n(self, file: _file_or_filename) -> None: ...
-
-def register_namespace(prefix: str, uri: str) -> None: ...
-def tostring(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> str: ...
-def tostringlist(element: Element, encoding: str=..., method: str=..., *, short_empty_elements: bool=...) -> List[str]: ...
-def dump(elem: Element) -> None: ...
-def parse(source: _file_or_filename, parser: 'XMLParser'=...) -> ElementTree: ...
-def iterparse(source: _file_or_filename, events: Sequence[str]=..., parser: 'XMLParser'=...) -> Iterator[Tuple[str, Element]]: ...
-
-class XMLPullParser:
-    def __init__(self, events: Sequence[str]=..., *, _parser: 'XMLParser'=...) -> None: ...
-    def feed(self, data: bytes) -> None: ...
-    def close(self) -> None: ...
-    def read_events(self) -> Iterator[Tuple[str, Element]]: ...
-
-class _IterParseIterator:
-    root = ...  # type: Any
-    def __init__(self, source: _file_or_filename, events: Sequence[str], parser: 'XMLParser', close_source: bool=...) -> None: ...
-    def __next__(self) -> Tuple[str, Element]: ...
-    def __iter__(self) -> _IterParseIterator: ...
-
-def XML(text: AnyStr, parser: 'XMLParser'=...) -> Element: ...
-def XMLID(text: AnyStr, parser: 'XMLParser'=...) -> Tuple[Element, Dict[str, Element]]: ...
-
-# TODO-improve this type
-fromstring = ...  # type: Callable[..., Element]
-
-def fromstringlist(sequence: Sequence[AnyStr], parser: 'XMLParser'=...) -> Element: ...
-
-class TreeBuilder:
-    def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], Element]=...) -> None: ...
-    def close(self) -> Element: ...
-    def data(self, data: AnyStr) -> None: ...
-    def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> Element: ...
-    def end(self, tag: AnyStr) -> Element: ...
-
-class XMLParser:
-    parser = ...  # type: Any
-    target = ...  # type: TreeBuilder
-    # TODO-what is entity used for???
-    entity = ...  # type: Any
-    version = ...  # type: str
-    def __init__(self, html: int=..., target: TreeBuilder=..., encoding: str=...) -> None: ...
-    def doctype(self, name: str, pubid: str, system: str) -> None: ...
-    def close(self) -> Any: ...  # TODO-most of the time, this will be Element, but it can be anything target.close() returns
-    def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi b/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi
deleted file mode 100644
index 8f689de..0000000
--- a/typeshed/stdlib/3.5/xml/etree/cElementTree.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/stdlib/3/ast.pyi b/typeshed/stdlib/3/ast.pyi
index f9488c9..5345676 100644
--- a/typeshed/stdlib/3/ast.pyi
+++ b/typeshed/stdlib/3/ast.pyi
@@ -25,7 +25,7 @@ class NodeVisitor():
 class NodeTransformer(NodeVisitor):
     def generic_visit(self, node: AST) -> None: ...
 
-def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> AST: ...
+def parse(source: Union[str, bytes], filename: Union[str, bytes] = ..., mode: str = ...) -> Module: ...
 def copy_location(new_node: AST, old_node: AST) -> AST: ...
 def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ...
 def fix_missing_locations(node: AST) -> AST: ...
diff --git a/typeshed/stdlib/3/builtins.pyi b/typeshed/stdlib/3/builtins.pyi
index 7000ad5..952cb3d 100644
--- a/typeshed/stdlib/3/builtins.pyi
+++ b/typeshed/stdlib/3/builtins.pyi
@@ -46,6 +46,7 @@ class object:
     def __format__(self, format_spec: str) -> str: ...
     def __getattribute__(self, name: str) -> Any: ...
     def __delattr__(self, name: str) -> None: ...
+    def __sizeof__(self) -> int: ...
 
     if sys.version_info >= (3, 6):
         def __init_subclass__(cls) -> None: ...
@@ -71,6 +72,16 @@ class type:
     # Note: the documentation doesnt specify what the return type is, the standard
     # implementation seems to be returning a list.
     def mro(self) -> List[type]: ...
+    def __instancecheck__(self, instance: Any) -> bool: ...
+    def __subclasscheck__(self, subclass: type) -> bool: ...
+
+class super(object):
+    @overload
+    def __init__(self, t: Any, obj: Any) -> None: ...
+    @overload
+    def __init__(self, t: Any) -> None: ...
+    @overload
+    def __init__(self) -> None: ...
 
 class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __init__(self, x: Union[SupportsInt, str, bytes] = ..., base: int = ...) -> None: ...
@@ -120,6 +131,7 @@ class int(SupportsInt, SupportsFloat, SupportsAbs[int]):
     def __int__(self) -> int: return self
     def __abs__(self) -> int: ...
     def __hash__(self) -> int: ...
+    def __bool__(self) -> bool: ...
 
 class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __init__(self, x: Union[SupportsFloat, str, bytes] = ...) -> None: ...
@@ -158,6 +170,7 @@ class float(SupportsFloat, SupportsInt, SupportsAbs[float]):
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __bool__(self) -> bool: ...
 
 class complex(SupportsAbs[float]):
     @overload
@@ -191,6 +204,7 @@ class complex(SupportsAbs[float]):
     def __str__(self) -> str: ...
     def __abs__(self) -> float: ...
     def __hash__(self) -> int: ...
+    def __bool__(self) -> bool: ...
 
 class str(Sequence[str]):
     @overload
@@ -469,10 +483,13 @@ class bool(int, SupportsInt, SupportsFloat):
     def __init__(self, o: object = ...) -> None: ...
 
 class slice:
-    start = 0
-    step = 0
-    stop = 0
-    def __init__(self, start: int, stop: int = 0, step: int = 0) -> None: ...
+    start = ...  # type: Optional[int]
+    step = ...  # type: Optional[int]
+    stop = ...  # type: Optional[int]
+    @overload
+    def __init__(self, stop: int = None) -> None: ...
+    @overload
+    def __init__(self, start: int = None, stop: int = None, step: int = None) -> None: ...
 
 class tuple(Sequence[_T_co], Generic[_T_co]):
     def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ...
@@ -556,7 +573,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
     def clear(self) -> None: ...
     def copy(self) -> Dict[_KT, _VT]: ...
-    def get(self, k: _KT, default: _VT = None) -> _VT: ...
     def pop(self, k: _KT, default: _VT = None) -> _VT: ...
     def popitem(self) -> Tuple[_KT, _VT]: ...
     def setdefault(self, k: _KT, default: _VT = None) -> _VT: ...
@@ -577,7 +593,6 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __getitem__(self, k: _KT) -> _VT: ...
     def __setitem__(self, k: _KT, v: _VT) -> None: ...
     def __delitem__(self, v: _KT) -> None: ...
-    def __contains__(self, o: object) -> bool: ...
     def __iter__(self) -> Iterator[_KT]: ...
     def __str__(self) -> str: ...
 
@@ -714,7 +729,7 @@ def hash(o: object) -> int: ...
 def help(*args: Any, **kwds: Any) -> None: ...
 def hex(i: int) -> str: ...  # TODO __index__
 def id(o: object) -> int: ...
-def input(prompt: str = None) -> str: ...
+def input(prompt: Any = None) -> str: ...
 @overload
 def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...
 @overload
@@ -740,7 +755,7 @@ def min(iterable: Iterable[_T], key: Callable[[_T], Any] = ..., default: _T = ..
 @overload
 def next(i: Iterator[_T]) -> _T: ...
 @overload
-def next(i: Iterator[_T], default: _T) -> _T: ...
+def next(i: Iterator[_T], default: _VT) -> Union[_T, _VT]: ...
 def oct(i: int) -> str: ...  # TODO __index__
 
 if sys.version_info >= (3, 6):
diff --git a/typeshed/stdlib/3/calendar.pyi b/typeshed/stdlib/3/calendar.pyi
index 80848fd..a9ce686 100644
--- a/typeshed/stdlib/3/calendar.pyi
+++ b/typeshed/stdlib/3/calendar.pyi
@@ -1,6 +1,9 @@
-from typing import Any, Iterable, List, Optional, Tuple, Sequence
 import datetime
 
+from time import struct_time
+from typing import Any, Iterable, List, Optional, Tuple, Sequence, Union
+
+
 LocaleType = Tuple[Optional[str], Optional[str]]
 
 class IllegalMonthError(ValueError):
@@ -72,7 +75,7 @@ c = ...  # type: TextCalendar
 def setfirstweekday(firstweekday: int) -> None: ...
 def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
 def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ...
-def timegm(tuple: Tuple[int, ...]) -> int: ...
+def timegm(tuple: Union[Tuple[int, ...], struct_time]) -> int: ...
 
 # Data attributes
 day_name = ...  # type: Sequence[str]
diff --git a/typeshed/stdlib/3/codecs.pyi b/typeshed/stdlib/3/codecs.pyi
index 287b7de..cc56eff 100644
--- a/typeshed/stdlib/3/codecs.pyi
+++ b/typeshed/stdlib/3/codecs.pyi
@@ -29,7 +29,7 @@ _decode_type = Callable[[_encoded], _decoded]  # signature of Codec().decode
 _stream_reader_type = Callable[[BinaryIO], 'StreamReader']  # signature of StreamReader __init__
 _stream_writer_type = Callable[[BinaryIO], 'StreamWriter']  # signature of StreamWriter __init__
 _incremental_encoder_type = Callable[[], 'IncrementalEncoder']  # signature of IncrementalEncoder __init__
-_incremental_decode_type = Callable[[], 'IncrementalDecoder']  # signature of IncrementalDecoder __init__
+_incremental_decoder_type = Callable[[], 'IncrementalDecoder']  # signature of IncrementalDecoder __init__
 
 
 def encode(obj: _decoded, encoding: str = ..., errors: str = ...) -> _encoded:
@@ -40,7 +40,7 @@ def decode(obj: _encoded, encoding: str = ..., errors: str = ...) -> _decoded:
 def lookup(encoding: str) -> 'CodecInfo':
     ...
 class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_writer_type]):
-    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decode_type = ..., name: str = ...) -> None:
+    def __init__(self, encode: _encode_type, decode: _decode_type, streamreader: _stream_reader_type = ..., streamwriter: _stream_writer_type = ..., incrementalencoder: _incremental_encoder_type = ..., incrementaldecoder: _incremental_decoder_type = ..., name: str = ...) -> None:
         self.encode = encode
         self.decode = decode
         self.streamreader = streamreader
@@ -51,11 +51,11 @@ class CodecInfo(Tuple[_encode_type, _decode_type, _stream_reader_type, _stream_w
 
 def getencoder(encoding: str) -> _encode_type:
     ...
-def getdecoder(encoding: str) -> _encode_type:
+def getdecoder(encoding: str) -> _decode_type:
     ...
 def getincrementalencoder(encoding: str) -> _incremental_encoder_type:
     ...
-def getincrementaldecoder(encoding: str) -> _incremental_encoder_type:
+def getincrementaldecoder(encoding: str) -> _incremental_decoder_type:
     ...
 def getreader(encoding: str) -> _stream_reader_type:
     ...
diff --git a/typeshed/stdlib/3/collections/__init__.pyi b/typeshed/stdlib/3/collections/__init__.pyi
index 00f463b..5cb89f7 100644
--- a/typeshed/stdlib/3/collections/__init__.pyi
+++ b/typeshed/stdlib/3/collections/__init__.pyi
@@ -5,6 +5,7 @@
 # TODO more abstract base classes (interfaces in mypy)
 
 # These are not exported.
+import sys
 from typing import (
     TypeVar, Generic, Dict, overload, List, Tuple,
     Callable, Any, Type, Optional, Union
@@ -35,6 +36,8 @@ from typing import (
     MutableSet as MutableSet,
     AbstractSet as Set,
 )
+if sys.version_info >= (3, 6):
+    from typing import AsyncGenerator as AsyncGenerator
 
 _T = TypeVar('_T')
 _KT = TypeVar('_KT')
@@ -163,16 +166,17 @@ class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]):
     def __missing__(self, key: _KT) -> _VT: ...
     # TODO __reversed__
 
-class ChainMap(Dict[_KT, _VT], Generic[_KT, _VT]):
-    @overload
-    def __init__(self) -> None: ...
-    @overload
-    def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ...
+if sys.version_info >= (3, 3):
+    class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+        @overload
+        def __init__(self) -> None: ...
+        @overload
+        def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ...
 
-    @property
-    def maps(self) -> List[Mapping[_KT, _VT]]: ...
+        @property
+        def maps(self) -> List[Mapping[_KT, _VT]]: ...
 
-    def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
+        def new_child(self, m: Mapping[_KT, _VT] = ...) -> ChainMap[_KT, _VT]: ...
 
-    @property
-    def parents(self) -> ChainMap[_KT, _VT]: ...
+        @property
+        def parents(self) -> ChainMap[_KT, _VT]: ...
diff --git a/typeshed/stdlib/3/collections/abc.pyi b/typeshed/stdlib/3/collections/abc.pyi
index f7d3ae4..5106ec5 100644
--- a/typeshed/stdlib/3/collections/abc.pyi
+++ b/typeshed/stdlib/3/collections/abc.pyi
@@ -35,4 +35,5 @@ if sys.version_info >= (3, 5):
 if sys.version_info >= (3, 6):
     from . import (
         Reversible as Reversible,
+        AsyncGenerator as AsyncGenerator,
     )
diff --git a/typeshed/stdlib/3/decimal.pyi b/typeshed/stdlib/3/decimal.pyi
index 5106e46..e773288 100644
--- a/typeshed/stdlib/3/decimal.pyi
+++ b/typeshed/stdlib/3/decimal.pyi
@@ -224,7 +224,6 @@ class Decimal(SupportsInt, SupportsFloat, SupportsAbs[Decimal], SupportsRound[in
     def __rpow__(self, other: int) -> Decimal: ...
     def __rsub__(self, other: int) -> Decimal: ...
     def __rtruediv__(self, other: int) -> Decimal: ...
-    def __sizeof__(self) -> int: ...
     def __sub__(self, other: _Decimal) -> Decimal: ...
     def __truediv__(self, other: _Decimal) -> Decimal: ...
     def __trunc__(self) -> int: ...
diff --git a/typeshed/stdlib/3/itertools.pyi b/typeshed/stdlib/3/itertools.pyi
index 8296d62..889d583 100644
--- a/typeshed/stdlib/3/itertools.pyi
+++ b/typeshed/stdlib/3/itertools.pyi
@@ -17,7 +17,7 @@ def repeat(object: _T) -> Iterator[_T]: ...
 @overload
 def repeat(object: _T, times: int) -> Iterator[_T]: ...
 
-def accumulate(iterable: Iterable[_T]) -> Iterator[_T]: ...
+def accumulate(iterable: Iterable[_T], func: Callable[[_T, _T], _T] = ...) -> Iterator[_T]: ...
 
 class chain(Iterator[_T], Generic[_T]):
     def __init__(self, *iterables: Iterable[_T]) -> None: ...
diff --git a/typeshed/stdlib/3/os/__init__.pyi b/typeshed/stdlib/3/os/__init__.pyi
index 4f5deea..ecf894e 100644
--- a/typeshed/stdlib/3/os/__init__.pyi
+++ b/typeshed/stdlib/3/os/__init__.pyi
@@ -382,5 +382,7 @@ if sys.version_info >= (3, 3):
               dir_fd: int = ...) -> Iterator[Tuple[AnyStr, List[AnyStr],
                                              List[AnyStr], int]]: ...  # Unix only
 
+    def get_terminal_size(fd: int = ...) -> Tuple[int, int]: ...
+
 if sys.version_info >= (3, 4):
     def cpu_count() -> Optional[int]: ...
diff --git a/typeshed/stdlib/3/sqlite3/dbapi2.pyi b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
index dcc2026..ef46085 100644
--- a/typeshed/stdlib/3/sqlite3/dbapi2.pyi
+++ b/typeshed/stdlib/3/sqlite3/dbapi2.pyi
@@ -229,7 +229,6 @@ class OptimizedUnicode:
     def __ne__(self, other): ...
     def __rmod__(self, other): ...
     def __rmul__(self, other): ...
-    def __sizeof__(self): ...
 
 class PrepareProtocol:
     def __init__(self, *args, **kwargs): ...
diff --git a/typeshed/stdlib/3/subprocess.pyi b/typeshed/stdlib/3/subprocess.pyi
index c22ba00..ebc7084 100644
--- a/typeshed/stdlib/3/subprocess.pyi
+++ b/typeshed/stdlib/3/subprocess.pyi
@@ -8,14 +8,14 @@ from types import TracebackType
 
 if sys.version_info >= (3, 5):
     class CompletedProcess:
-        args = ...  # type: Union[List, str]
+        args = ...  # type: Union[Sequence[str], str]
         returncode = ...  # type: int
         stdout = ...  # type: Any
         stderr = ...  # type: Any
         def __init__(self, args: Union[List, str],
                      returncode: int,
-                     stdout: Union[str, bytes],
-                     stderr: Union[str, bytes]) -> None: ...
+                     stdout: Union[str, bytes, None] = ...,
+                     stderr: Union[str, bytes, None] = ...) -> None: ...
         def check_returncode(self) -> None: ...
 
     # Nearly same args as Popen.__init__ except for timeout, input, and check
diff --git a/typeshed/stdlib/3/sys.pyi b/typeshed/stdlib/3/sys.pyi
index 042fcf1..b3042e2 100644
--- a/typeshed/stdlib/3/sys.pyi
+++ b/typeshed/stdlib/3/sys.pyi
@@ -7,6 +7,7 @@ from typing import (
     List, Sequence, Any, Dict, Tuple, TextIO, overload, Optional, Union,
     TypeVar, Callable, Type,
 )
+import sys
 from types import TracebackType
 from mypy_extensions import NoReturn
 
@@ -146,6 +147,10 @@ def getprofile() -> Any: ...  # TODO return type
 def gettrace() -> Any: ...  # TODO return
 def getwindowsversion() -> Any: ...  # Windows only, TODO return type
 def intern(string: str) -> str: ...
+
+if sys.version_info >= (3, 5):
+    def is_finalizing() -> bool: ...
+
 def setcheckinterval(interval: int) -> None: ...  # deprecated
 def setdlopenflags(n: int) -> None: ...  # Linux only
 def setprofile(profilefunc: Any) -> None: ...  # TODO type
diff --git a/typeshed/stdlib/3/types.pyi b/typeshed/stdlib/3/types.pyi
index 17e3347..4c9535b 100644
--- a/typeshed/stdlib/3/types.pyi
+++ b/typeshed/stdlib/3/types.pyi
@@ -27,6 +27,8 @@ class FunctionType:
     __dict__ = ...  # type: Dict[str, Any]
     __globals__ = ...  # type: Dict[str, Any]
     __name__ = ...  # type: str
+    __annotations__ = ...  # type: Dict[str, Any]
+    __kwdefaults__ = ...  # type: Dict[str, Any]
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
     def __get__(self, obj: Optional[object], type: Optional[type]) -> 'MethodType': ...
 LambdaType = FunctionType
diff --git a/typeshed/stdlib/3/typing.pyi b/typeshed/stdlib/3/typing.pyi
index 099f925..9c7ad8c 100644
--- a/typeshed/stdlib/3/typing.pyi
+++ b/typeshed/stdlib/3/typing.pyi
@@ -15,6 +15,7 @@ Callable = object()
 Type = object()
 _promote = object()
 no_type_check = object()
+ClassVar = object()
 
 class GenericMeta(type): ...
 
@@ -151,6 +152,24 @@ class AsyncIterator(AsyncIterable[_T_co],
     def __anext__(self) -> Awaitable[_T_co]: ...
     def __aiter__(self) -> 'AsyncIterator[_T_co]': ...
 
+if sys.version_info >= (3, 6):
+    class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]):
+        @abstractmethod
+        def __anext__(self) -> Awaitable[_T_co]: ...
+
+        @abstractmethod
+        def asend(self, value: _T_contra) -> Awaitable[_T_co]: ...
+
+        @abstractmethod
+        def athrow(self, typ: Type[BaseException], val: Optional[BaseException] = None,
+                   tb: Any = None) -> Awaitable[_T_co]: ...
+
+        @abstractmethod
+        def aclose(self) -> Awaitable[_T_co]: ...
+
+        @abstractmethod
+        def __aiter__(self) -> 'AsyncGenerator[_T_co, _T_contra]': ...
+
 class Container(Generic[_T_co]):
     @abstractmethod
     def __contains__(self, x: object) -> bool: ...
@@ -250,8 +269,10 @@ class Mapping(Iterable[_KT], Container[_KT], Sized, Generic[_KT, _VT_co]):
     def __getitem__(self, k: _KT) -> _VT_co:
         ...
     # Mixin methods
-    def get(self, k: _KT, default: _VT_co = ...) -> _VT_co:  # type: ignore
-        ...
+    @overload  # type: ignore
+    def get(self, k: _KT) -> Optional[_VT_co]: ...
+    @overload  # type: ignore
+    def get(self, k: _KT, default: Union[_VT_co, _T]) -> Union[_VT_co, _T]: ...
     def items(self) -> AbstractSet[Tuple[_KT, _VT_co]]: ...
     def keys(self) -> AbstractSet[_KT]: ...
     def values(self) -> ValuesView[_VT_co]: ...
@@ -442,8 +463,17 @@ def cast(tp: Type[_T], obj: Any) -> _T: ...
 
 # Type constructors
 
-# NamedTuple is special-cased in the type checker; the initializer is ignored.
-def NamedTuple(typename: str, fields: Iterable[Tuple[str, Any]], *,
-               verbose: bool = ..., rename: bool = ..., module: str = None) -> Type[tuple]: ...
+# NamedTuple is special-cased in the type checker
+class NamedTuple(tuple):
+    _fields = ...  # type: Tuple[str, ...]
+
+    def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]], *,
+                 verbose: bool = ..., rename: bool = ..., module: Any = ...) -> None: ...
+
+    @classmethod
+    def _make(cls, iterable: Iterable[Any]) -> NamedTuple: ...
+
+    def _asdict(self) -> dict: ...
+    def _replace(self, **kwargs: Any) -> NamedTuple: ...
 
 def NewType(name: str, tp: Type[_T]) -> Type[_T]: ...
diff --git a/typeshed/stdlib/3/urllib/parse.pyi b/typeshed/stdlib/3/urllib/parse.pyi
index d427e40..25aec92 100644
--- a/typeshed/stdlib/3/urllib/parse.pyi
+++ b/typeshed/stdlib/3/urllib/parse.pyi
@@ -1,5 +1,6 @@
 # Stubs for urllib.parse
-from typing import Any, List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping, Union, NamedTuple
+from typing import Any, List, Dict, Tuple, AnyStr, Generic, overload, Sequence, Mapping, Union, NamedTuple, Callable
+import sys
 
 __all__ = (
     'urlparse',
@@ -123,11 +124,19 @@ def urldefrag(url: str) -> DefragResult: ...
 @overload
 def urldefrag(url: bytes) -> DefragResultBytes: ...
 
-def urlencode(query: Union[Mapping[Any, Any],
-                           Mapping[Any, Sequence[Any]],
-                           Sequence[Tuple[Any, Any]],
-                           Sequence[Tuple[Any, Sequence[Any]]]],
-              doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
+if sys.version_info >= (3, 5):
+    def urlencode(query: Union[Mapping[Any, Any],
+                               Mapping[Any, Sequence[Any]],
+                               Sequence[Tuple[Any, Any]],
+                               Sequence[Tuple[Any, Sequence[Any]]]],
+                  doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...,
+                  quote_via: Callable[[str, AnyStr, str, str], str] = ...) -> str: ...
+else:
+    def urlencode(query: Union[Mapping[Any, Any],
+                               Mapping[Any, Sequence[Any]],
+                               Sequence[Tuple[Any, Any]],
+                               Sequence[Tuple[Any, Sequence[Any]]]],
+                  doseq: bool = ..., safe: AnyStr = ..., encoding: str = ..., errors: str = ...) -> str: ...
 
 def urljoin(base: AnyStr, url: AnyStr, allow_fragments: bool = ...) -> AnyStr: ...
 
diff --git a/typeshed/stdlib/3/wsgiref/types.pyi b/typeshed/stdlib/3/wsgiref/types.pyi
new file mode 100644
index 0000000..f6acc2e
--- /dev/null
+++ b/typeshed/stdlib/3/wsgiref/types.pyi
@@ -0,0 +1,33 @@
+# Type declaration for a WSGI Function in Python 3
+#
+# wsgiref/types.py doesn't exist and neither does WSGIApplication, it's a type
+# provided for type checking purposes.
+#
+# This means you cannot simply import wsgiref.types in your code. Instead,
+# use the `TYPE_CHECKING` flag from the typing module:
+#
+#   from typing import TYPE_CHECKING
+#
+#   if TYPE_CHECKING:
+#       from wsgiref.types import WSGIApplication
+#
+# This import is now only taken into account by the type checker. Consequently,
+# you need to use 'WSGIApplication' and not simply WSGIApplication when type
+# hinting your code.  Otherwise Python will raise NameErrors.
+
+from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
+from types import TracebackType
+
+_exc_info = Tuple[Optional[Type[BaseException]],
+                  Optional[BaseException],
+                  Optional[TracebackType]]
+WSGIApplication = Callable[
+    [
+        Dict[str, str],
+        Union[
+            Callable[[str, List[Tuple[str, str]]], Callable[[Union[bytes, str]], None]],
+            Callable[[str, List[Tuple[str, str]], _exc_info], Callable[[Union[bytes, str]], None]]
+        ]
+    ],
+    Iterable[Union[bytes, str]],
+]
diff --git a/typeshed/stdlib/3/xml/etree/ElementInclude.pyi b/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
deleted file mode 100644
index ef1a591..0000000
--- a/typeshed/stdlib/3/xml/etree/ElementInclude.pyi
+++ /dev/null
@@ -1,19 +0,0 @@
-# Stubs for xml.etree.ElementInclude (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Union, Optional, Callable
-from .ElementTree import _ElementInterface
-
-XINCLUDE = ...  # type: str
-XINCLUDE_INCLUDE = ...  # type: str
-XINCLUDE_FALLBACK = ...  # type: str
-
-class FatalIncludeError(SyntaxError): ...
-
-def default_loader(href: Union[str, bytes, int], parse: str, encoding: Optional[str]=...) -> Union[str, _ElementInterface]: ...
-
-# TODO: loader is of type default_loader ie it takes a callable that has the
-# same signature as default_loader. But default_loader has a keyword argument
-# Which can't be represented using Callable...
-def include(elem: _ElementInterface, loader: Callable[..., Union[str, _ElementInterface]]=...) -> None: ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementPath.pyi b/typeshed/stdlib/3/xml/etree/ElementPath.pyi
deleted file mode 100644
index db0aa36..0000000
--- a/typeshed/stdlib/3/xml/etree/ElementPath.pyi
+++ /dev/null
@@ -1,25 +0,0 @@
-# Stubs for xml.etree.ElementPath (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Tuple, List, Union, TypeVar, Callable, Optional
-from .ElementTree import _ElementInterface
-
-xpath_tokenizer_re = ...  # type: Callable[..., List[Tuple[str, str]]]
-
-
-class xpath_descendant_or_self: ...
-
-_T = TypeVar('_T')
-
-class Path:
-    def __init__(self, path: str) -> None: ...
-    def find(self, element: _ElementInterface) -> Optional[_ElementInterface]: ...
-    def findtext(self, element: _ElementInterface, default: _T=...) -> Union[str, _T]: ...
-    def findall(self, element: _ElementInterface) -> List[_ElementInterface]: ...
-
-def find(element: _ElementInterface, path: str) -> Optional[_ElementInterface]: ...
-
-def findtext(element: _ElementInterface, path: str, default: _T=...) -> Union[str, _T]: ...
-
-def findall(element: _ElementInterface, path: str) -> List[_ElementInterface]: ...
diff --git a/typeshed/stdlib/3/xml/etree/ElementTree.pyi b/typeshed/stdlib/3/xml/etree/ElementTree.pyi
deleted file mode 100644
index 6f26818..0000000
--- a/typeshed/stdlib/3/xml/etree/ElementTree.pyi
+++ /dev/null
@@ -1,98 +0,0 @@
-# Stubs for xml.etree.ElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, AnyStr, Union, IO, Callable, Dict, List, Tuple, Sequence, Iterator, TypeVar, Optional, KeysView, ItemsView, Generator
-import io
-
-VERSION = ...  # type: str
-
-_Ss = TypeVar('_Ss', str, bytes)
-_T = TypeVar('_T')
-_str_or_bytes = Union[str, bytes]
-
-class _ElementInterface(Sequence['_ElementInterface']):
-    tag = ...  # type: _str_or_bytes
-    attrib = ...  # type: Dict[_str_or_bytes, _str_or_bytes]
-    text = ...  # type: Optional[_str_or_bytes]
-    tail = ...  # type: Optional[_str_or_bytes]
-    def __init__(self, tag: Union[AnyStr, Callable[..., '_ElementInterface']], attrib: Dict[AnyStr, AnyStr]) -> None: ...
-    def makeelement(self, tag: _Ss, attrib: Dict[_Ss, _Ss]) -> '_ElementInterface': ...
-    def __len__(self) -> int: ...
-    def __getitem__(self, index: int) -> '_ElementInterface': ...
-    def __setitem__(self, index: int, element: '_ElementInterface') -> None: ...
-    def __delitem__(self, index: int) -> None: ...
-    def __getslice__(self, start: int, stop: int) -> Sequence['_ElementInterface']: ...
-    def __setslice__(self, start: int, stop: int, elements: Sequence['_ElementInterface']) -> None: ...
-    def __delslice__(self, start: int, stop: int) -> None: ...
-    def append(self, element: '_ElementInterface') -> None: ...
-    def insert(self, index: int, element: '_ElementInterface') -> None: ...
-    def remove(self, element: '_ElementInterface') -> None: ...
-    def getchildren(self) -> List['_ElementInterface']: ...
-    def find(self, path: str) -> Optional['_ElementInterface']: ...
-    def findtext(self, path: str, default: _T=...) -> Union[str, _T]: ...
-    def findall(self, path: str) -> List['_ElementInterface']: ...
-    def clear(self) -> None: ...
-    def get(self, key: AnyStr, default: _T=...) -> Union[AnyStr, _T]: ...
-    def set(self, key: AnyStr, value: AnyStr) -> None: ...
-    def keys(self) -> KeysView[AnyStr]: ...
-    def items(self) -> ItemsView[AnyStr, AnyStr]: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List['_ElementInterface']: ...
-
-def Element(tag: Union[AnyStr, Callable[..., _ElementInterface]], attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> _ElementInterface: ...
-def SubElement(parent: _ElementInterface, tag: AnyStr, attrib: Dict[AnyStr, AnyStr]=..., **extra: AnyStr) -> _ElementInterface: ...
-def Comment(text: _str_or_bytes=...) -> _ElementInterface: ...
-def ProcessingInstruction(target: str, text: str=...) -> _ElementInterface: ...
-
-PI = ...  # type: Callable[..., _ElementInterface]
-
-class QName:
-    text = ...  # type: str
-    def __init__(self, text_or_uri: str, tag: str=...) -> None: ...
-
-
-_file_or_filename = Union[str, bytes, int, IO[Any]]
-
-class ElementTree:
-    def __init__(self, element: _ElementInterface=..., file: _file_or_filename=...) -> None: ...
-    def getroot(self) -> _ElementInterface: ...
-    def parse(self, source: _file_or_filename, parser: 'XMLTreeBuilder'=...) -> _ElementInterface: ...
-    def getiterator(self, tag: Union[str, AnyStr]=...) -> List[_ElementInterface]: ...
-    def find(self, path: str) -> Optional[_ElementInterface]: ...
-    def findtext(self, path: str, default: _T=...) -> Union[_T, str]: ...
-    def findall(self, path: str) -> List[_ElementInterface]: ...
-    def write(self, file_or_filename: _file_or_filename, encoding: str=...) -> None: ...
-
-def iselement(element: _ElementInterface) -> bool: ...
-def dump(elem: _ElementInterface) -> None: ...
-def fixtag(tag: Union[str, QName], namespaces: Dict[str, str]) -> Tuple[str, Optional[str]]: ...
-def parse(source: _file_or_filename, parser: 'XMLTreeBuilder'=...) -> ElementTree: ...
-
-
-class iterparse:
-    def __init__(self, source: _file_or_filename, events: Sequence[str]=...) -> None: ...
-    # TODO-figure out this type...
-    def __next__(self) -> Tuple[str, _ElementInterface]: ...
-
-def XML(text: AnyStr) -> _ElementInterface: ...
-def XMLID(text: AnyStr) -> Tuple[_ElementInterface, Dict[str, _ElementInterface]]: ...
-
-# TODO-improve this type
-fromstring = ...  # type: Callable[..., _ElementInterface]
-
-def tostring(element: _ElementInterface, encoding: str=...) -> AnyStr: ...
-
-class TreeBuilder:
-    def __init__(self, element_factory: Callable[[AnyStr, Dict[AnyStr, AnyStr]], _ElementInterface]=...) -> None: ...
-    def close(self) -> _ElementInterface: ...
-    def data(self, data: AnyStr) -> None: ...
-    def start(self, tag: AnyStr, attrs: Dict[AnyStr, AnyStr]) -> _ElementInterface: ...
-    def end(self, tag: AnyStr) -> _ElementInterface: ...
-
-class XMLTreeBuilder:
-    # TODO-what is entity used for???
-    entity = ...  # type: Any
-    def __init__(self, html: int=..., target: TreeBuilder=...) -> None: ...
-    def doctype(self, name: str, pubid: str, system: str) -> None: ...
-    def close(self) -> Any: ...  # TODO-most of the time, this will be Element, but it can be anything target.close() returns
-    def feed(self, data: AnyStr)-> None: ...
diff --git a/typeshed/stdlib/3/xml/etree/cElementTree.pyi b/typeshed/stdlib/3/xml/etree/cElementTree.pyi
deleted file mode 100644
index 8f689de..0000000
--- a/typeshed/stdlib/3/xml/etree/cElementTree.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-# Stubs for xml.etree.cElementTree (Python 3.4)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from xml.etree.ElementTree import *  # noqa: F403
diff --git a/typeshed/third_party/2/OpenSSL/crypto.pyi b/typeshed/third_party/2/OpenSSL/crypto.pyi
index ef352ac..972a4f8 100644
--- a/typeshed/third_party/2/OpenSSL/crypto.pyi
+++ b/typeshed/third_party/2/OpenSSL/crypto.pyi
@@ -1,6 +1,185 @@
+# Stubs for OpenSSL.crypto (Python 2)
+
+from typing import Any, Callable, Iterable, List, Optional, Set, Text, Tuple, Union
+
+from cryptography.hazmat.primitives.asymmetric import dsa, rsa
+
+FILETYPE_PEM = ...  # type: int
+FILETYPE_ASN1 = ...  # type: int
+FILETYPE_TEXT = ...  # type: int
+TYPE_RSA = ...  # type: int
+TYPE_DSA = ...  # type: int
+
+class Error(Exception): ...
+
+class PKey:
+    def __init__(self) -> None: ...
+    def to_cryptography_key(self) -> Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]: ...
+    @classmethod
+    def from_cryptography_key(cls, crypto_key: Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]): ...
+    def generate_key(self, type: int, bits: int) -> None: ...
+    def check(self) -> bool: ...
+    def type(self) -> int: ...
+    def bits(self) -> int: ...
+
+class _EllipticCurve:
+    name = ...  # type: Text
+
+def get_elliptic_curves() -> Set[_EllipticCurve]: ...
+def get_elliptic_curve(name: str) -> _EllipticCurve: ...
+
+class X509Name:
+    def __init__(self, name: X509Name) -> None: ...
+    countryName = ...  # type: Union[str, unicode]
+    stateOrProvinceName = ...  # type: Union[str, unicode]
+    localityName = ...  # type: Union[str, unicode]
+    organizationName = ...  # type: Union[str, unicode]
+    organizationalUnitName = ...  # type: Union[str, unicode]
+    commonName = ...  # type: Union[str, unicode]
+    emailAddress = ...  # type: Union[str, unicode]
+    C = ...  # type: Union[str, unicode]
+    ST = ...  # type: Union[str, unicode]
+    L = ...  # type: Union[str, unicode]
+    O = ...  # type: Union[str, unicode]
+    OU = ...  # type: Union[str, unicode]
+    CN = ...  # type: Union[str, unicode]
+    def hash(self) -> int: ...
+    def der(self) -> bytes: ...
+    def get_components(self) -> List[Tuple[str, str]]: ...
+
+class X509Extension:
+    def __init__(self, type_name: bytes, critical: bool, value: bytes, subject: Optional[X509] = ..., issuer: Optional[X509] = ...) -> None: ...
+    def get_critical(self) -> bool: ...
+    def get_short_name(self) -> str: ...
+    def get_data(self) -> str: ...
+
+class X509Req:
+    def __init__(self) -> None: ...
+    def set_pubkey(self, pkey: PKey) -> None: ...
+    def get_pubkey(self) -> PKey: ...
+    def set_version(self, version: int) -> None: ...
+    def get_version(self) -> int: ...
+    def get_subject(self) -> X509Name: ...
+    def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
+    def get_extensions(self) -> List[X509Extension]: ...
+    def sign(self, pkey: PKey, digest: str) -> None: ...
+    def verify(self, pkey: PKey) -> bool: ...
+
 class X509:
-    ...
+    def __init__(self) -> None: ...
+    def set_version(self, version: int) -> None: ...
+    def get_version(self) -> int: ...
+    def get_pubkey(self) -> PKey: ...
+    def set_pubkey(self, pkey: PKey) -> None: ...
+    def sign(self, pkey: PKey, digest: str) -> None: ...
+    def get_signature_algorithm(self) -> str: ...
+    def digest(self, digest_name: str) -> str: ...
+    def subject_name_hash(self) -> str: ...
+    def set_serial_number(self, serial: int) -> None: ...
+    def get_serial_number(self) -> int: ...
+    def gmtime_adj_notAfter(self, amount: int) -> None: ...
+    def gmtime_adj_notBefore(self, amount: int) -> None: ...
+    def has_expired(self) -> bool: ...
+    def get_notBefore(self) -> str: ...
+    def set_notBefore(self, when: str) -> None: ...
+    def get_notAfter(self) -> str: ...
+    def set_notAfter(self, when: str) -> None: ...
+    def get_issuer(self) -> X509Name: ...
+    def set_issuer(self, issuer: X509Name) -> None: ...
+    def get_subject(self) -> X509Name: ...
+    def set_subject(self, subject: X509Name) -> None: ...
+    def get_extension_count(self) -> int: ...
+    def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
+    def get_extension(self, index: int) -> X509Extension: ...
+
+class X509StoreFlags:
+    CRL_CHECK = ...  # type: int
+    CRL_CHECK_ALL = ...  # type: int
+    IGNORE_CRITICAL = ...  # type: int
+    X509_STRICT = ...  # type: int
+    ALLOW_PROXY_CERTS = ...  # type: int
+    POLICY_CHECK = ...  # type: int
+    EXPLICIT_POLICY = ...  # type: int
+    INHIBIT_MAP = ...  # type: int
+    NOTIFY_POLICY = ...  # type: int
+    CHECK_SS_SIGNATURE = ...  # type: int
+    CB_ISSUER_CHECK = ...  # type: int
+
+class X509Store:
+    def __init__(self) -> None: ...
+    def add_cert(self, cert: X509) -> None: ...
+    def add_crl(self, crl: CRL) -> None: ...
+    def set_flags(self, flags: int) -> None: ...
+
+class X509StoreContextError(Exception):
+    certificate = ...  # type: X509
+    def __init__(self, message: str, certificate: X509) -> None: ...
+
+class X509StoreContext:
+    def __init__(self, store: X509Store, certificate: X509) -> None: ...
+    def set_store(self, store: X509Store) -> None: ...
+    def verify_certificate(self) -> None: ...
+
+def load_certificate(type: int, buffer: Union[str, unicode]) -> X509: ...
+def dump_certificate(type: int, cert: X509) -> bytes: ...
+def dump_publickey(type: int, pkey: PKey) -> bytes: ...
+def dump_privatekey(type: int, pkey: PKey, cipher: Optional[str] = ..., passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> bytes: ...
+
+class Revoked:
+    def __init__(self) -> None: ...
+    def set_serial(self, hex_str: str) -> None: ...
+    def get_serial(self) -> str: ...
+    def set_reason(self, reason: str) -> None: ...
+    def get_reason(self) -> str: ...
+    def all_reasons(self) -> List[str]: ...
+    def set_rev_date(self, when: str) -> None: ...
+    def get_rev_date(self) -> str: ...
+
+class CRL:
+    def __init__(self) -> None: ...
+    def get_revoked(self) -> Tuple[Revoked, ...]: ...
+    def add_revoked(self, revoked: Revoked) -> None: ...
+    def get_issuer(self) -> X509Name: ...
+    def set_version(self, version: int) -> None: ...
+    def set_lastUpdate(self, when: str) -> None: ...
+    def set_nextUpdate(self, when: str) -> None: ...
+    def sign(self, issuer_cert: X509, issuer_key: PKey, digest: str) -> None: ...
+    def export(self, cert: X509, key: PKey, type: int = ..., days: int = ..., digest: str = ...) -> bytes: ...
+
+class PKCS7:
+    def type_is_signed(self) -> bool: ...
+    def type_is_enveloped(self) -> bool: ...
+    def type_is_signedAndEnveloped(self) -> bool: ...
+    def type_is_data(self) -> bool: ...
+    def get_type_name(self) -> str: ...
+
+class PKCS12:
+    def __init__(self) -> None: ...
+    def get_certificate(self) -> X509: ...
+    def set_certificate(self, cert: X509) -> None: ...
+    def get_privatekey(self) -> PKey: ...
+    def set_privatekey(self, pkey: PKey) -> None: ...
+    def get_ca_certificates(self) -> Tuple[X509, ...]: ...
+    def set_ca_certificates(self, cacerts: Iterable[X509]) -> None: ...
+    def set_friendlyname(self, name: bytes) -> None: ...
+    def get_friendlyname(self) -> bytes: ...
+    def export(self, passphrase: Optional[str] = ..., iter: int = ..., maciter: int = ...): ...
+
+class NetscapeSPKI:
+    def __init__(self) -> None: ...
+    def sign(self, pkey: PKey, digest: str) -> None: ...
+    def verify(self, key: PKey) -> bool: ...
+    def b64_encode(self) -> str: ...
+    def get_pubkey(self) -> PKey: ...
+    def set_pubkey(self, pkey: PKey) -> None: ...
 
-def sign(key: str, data: str, digest: str) -> str: ...
-def verify(certificate: X509, signature: str, data: str, digest: str) -> None:
-    raise Exception()
+def load_publickey(type: int, buffer: Union[str, unicode]) -> PKey: ...
+def load_privatekey(type: int, buffer: bytes, passphrase: Optional[Union[str, Callable[[int], int]]] = ...): ...
+def dump_certificate_request(type: int, req: X509Req): ...
+def load_certificate_request(type, buffer: Union[str, unicode]) -> X509Req: ...
+def sign(pkey: PKey, data: Union[str, unicode], digest: str) -> bytes: ...
+def verify(cert: X509, signature: bytes, data: Union[str, unicode], digest: str) -> None: ...
+def dump_crl(type: int, crl: CRL) -> bytes: ...
+def load_crl(type: int, buffer: Union[str, unicode]) -> CRL: ...
+def load_pkcs7_data(type: int, buffer: Union[str, unicode]) -> PKCS7: ...
+def load_pkcs12(buffer: Union[str, unicode], passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> PKCS12: ...
diff --git a/typeshed/stdlib/3.4/xml/etree/__init__.pyi b/typeshed/third_party/2/cryptography/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3.4/xml/etree/__init__.pyi
rename to typeshed/third_party/2/cryptography/__init__.pyi
diff --git a/typeshed/stdlib/3.5/xml/__init__.pyi b/typeshed/third_party/2/cryptography/hazmat/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3.5/xml/__init__.pyi
rename to typeshed/third_party/2/cryptography/hazmat/__init__.pyi
diff --git a/typeshed/stdlib/3.5/xml/etree/__init__.pyi b/typeshed/third_party/2/cryptography/hazmat/primitives/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3.5/xml/etree/__init__.pyi
rename to typeshed/third_party/2/cryptography/hazmat/primitives/__init__.pyi
diff --git a/typeshed/stdlib/3/xml/etree/__init__.pyi b/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/__init__.pyi
similarity index 100%
rename from typeshed/stdlib/3/xml/etree/__init__.pyi
rename to typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/__init__.pyi
diff --git a/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi b/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi
new file mode 100644
index 0000000..cfb0c73
--- /dev/null
+++ b/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/dsa.pyi
@@ -0,0 +1,4 @@
+# Minimal stub expressing only the classes required by OpenSSL.crypto.
+
+class DSAPrivateKey: ...
+class DSAPublicKey: ...
diff --git a/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi b/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi
new file mode 100644
index 0000000..57e3bef
--- /dev/null
+++ b/typeshed/third_party/2/cryptography/hazmat/primitives/asymmetric/rsa.pyi
@@ -0,0 +1,4 @@
+# Minimal stub expressing only the classes required by OpenSSL.crypto.
+
+class RSAPrivateKey: ...
+class RSAPublicKey: ...
diff --git a/typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi b/typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi
new file mode 100644
index 0000000..57ff681
--- /dev/null
+++ b/typeshed/third_party/2/cryptography/hazmat/primitives/serialization.pyi
@@ -0,0 +1,32 @@
+# Stubs for cryptography.hazmat.primitives.serialization (Python 2)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from enum import Enum
+
+def load_pem_private_key(data, password, backend): ...
+def load_pem_public_key(data, backend): ...
+def load_der_private_key(data, password, backend): ...
+def load_der_public_key(data, backend): ...
+def load_ssh_public_key(data, backend): ...
+
+class Encoding(Enum):
+    PEM = ...  # type: str
+    DER = ...  # type: str
+
+class PrivateFormat(Enum):
+    PKCS8 = ...  # type: str
+    TraditionalOpenSSL = ...  # type: str
+
+class PublicFormat(Enum):
+    SubjectPublicKeyInfo = ...  # type: str
+    PKCS1 = ...  # type: str
+
+class KeySerializationEncryption: ...
+
+class BestAvailableEncryption:
+    password = ...  # type: Any
+    def __init__(self, password) -> None: ...
+
+class NoEncryption: ...
diff --git a/typeshed/third_party/2/dateutil/tz/__init__.pyi b/typeshed/third_party/2/dateutil/tz/__init__.pyi
new file mode 100644
index 0000000..30a7ab3
--- /dev/null
+++ b/typeshed/third_party/2/dateutil/tz/__init__.pyi
@@ -0,0 +1,5 @@
+# Stubs for dateutil.tz (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from .tz import tzutc, tzoffset, tzlocal, tzfile, tzrange, tzstr, tzical, gettz, datetime_exists, datetime_ambiguous
diff --git a/typeshed/third_party/2/dateutil/tz/_common.pyi b/typeshed/third_party/2/dateutil/tz/_common.pyi
new file mode 100644
index 0000000..e0856d2
--- /dev/null
+++ b/typeshed/third_party/2/dateutil/tz/_common.pyi
@@ -0,0 +1,28 @@
+# Stubs for dateutil.tz._common (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Optional
+from datetime import datetime, tzinfo, timedelta
+
+def tzname_in_python2(namefunc): ...
+def enfold(dt, fold: int = ...): ...
+
+class _DatetimeWithFold(datetime):
+    @property
+    def fold(self): ...
+
+class _tzinfo(tzinfo):
+    def is_ambiguous(self, dt: datetime) -> bool: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+
+class tzrangebase(_tzinfo):
+    def __init__(self) -> None: ...
+    def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
+    def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
+    def tzname(self, dt: Optional[datetime]) -> str: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+    def is_ambiguous(self, dt: datetime) -> bool: ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
diff --git a/typeshed/third_party/2/dateutil/tz/tz.pyi b/typeshed/third_party/2/dateutil/tz/tz.pyi
new file mode 100644
index 0000000..ccd3819
--- /dev/null
+++ b/typeshed/third_party/2/dateutil/tz/tz.pyi
@@ -0,0 +1,85 @@
+# Stubs for dateutil.tz.tz (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Optional, Union, IO, Tuple
+import datetime
+from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
+from ._common import tzrangebase as tzrangebase, enfold as enfold
+from ..relativedelta import relativedelta
+
+ZERO = ...  # type: datetime.timedelta
+EPOCH = ...  # type: datetime.datetime
+EPOCHORDINAL = ...  # type: int
+
+class tzutc(datetime.tzinfo):
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class tzoffset(datetime.tzinfo):
+    def __init__(self, name, offset) -> None: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class tzlocal(_tzinfo):
+    def __init__(self) -> None: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class _ttinfo:
+    def __init__(self) -> None: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+
+class tzfile(_tzinfo):
+    def __init__(self, fileobj: Union[unicode, str, IO[str], IO[unicode]], filename: Union[str, unicode, None] = ...) -> None: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    def __reduce__(self): ...
+    def __reduce_ex__(self, protocol): ...
+
+class tzrange(tzrangebase):
+    hasdst = ...  # type: bool
+    def __init__(self, stdabbr: Union[str, unicode], stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Union[str, unicode, None] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
+    def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ...
+    def __eq__(self, other): ...
+
+class tzstr(tzrange):
+    hasdst = ...  # type: bool
+    def __init__(self, s, posix_offset: bool = ...) -> None: ...
+
+class tzical:
+    def __init__(self, fileobj: Union[unicode, str, IO[str], IO[unicode]]) -> None: ...
+    def keys(self): ...
+    def get(self, tzid: Optional[Any] = ...): ...
+
+TZFILES = ...  # type: List[str]
+TZPATHS = ...  # type: List[str]
+
+def gettz(name: Union[str, unicode, None] = ...) -> Optional[datetime.tzinfo]: ...
+def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
+def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
diff --git a/typeshed/third_party/2/requests/api.pyi b/typeshed/third_party/2/requests/api.pyi
index 785439e..9f041d2 100644
--- a/typeshed/third_party/2/requests/api.pyi
+++ b/typeshed/third_party/2/requests/api.pyi
@@ -1,17 +1,22 @@
-# Stubs for requests.api (Python 3)
+# Stubs for requests.api (Python 2)
 
-from typing import Union, Optional, AnyStr
+from typing import Union, Optional, Iterable, Mapping, Tuple
 
 from .models import Response
 
-def request(method: str, url: str, **kwargs) -> Response: ...
+ParamsMappingValueType = Union[str, unicode, int, float, Iterable[Union[str, unicode, int, float]]]
 
+def request(method: str, url: str, **kwargs) -> Response: ...
 def get(url: Union[str, unicode],
-        params: Optional[Union[dict[Union[str, unicode],
-                                    Union[str, unicode]],
-                               Union[str, unicode]]]=None,
+        params: Optional[
+            Union[Mapping[Union[str, unicode, int, float], ParamsMappingValueType],
+                  Union[str, unicode],
+                  Tuple[Union[str, unicode, int, float], ParamsMappingValueType],
+                  Mapping[str, ParamsMappingValueType],
+                  Mapping[unicode, ParamsMappingValueType],
+                  Mapping[int, ParamsMappingValueType],
+                  Mapping[float, ParamsMappingValueType]]] = None,
         **kwargs) -> Response: ...
-
 def options(url: Union[str, unicode], **kwargs) -> Response: ...
 def head(url: Union[str, unicode], **kwargs) -> Response: ...
 def post(url: Union[str, unicode], data=..., json=...,
diff --git a/typeshed/third_party/2/six/__init__.pyi b/typeshed/third_party/2/six/__init__.pyi
index 302c458..c4c4723 100644
--- a/typeshed/third_party/2/six/__init__.pyi
+++ b/typeshed/third_party/2/six/__init__.pyi
@@ -27,7 +27,7 @@ PY2 = True
 PY3 = False
 PY34 = False
 
-string_types = basestring,
+string_types = (str, unicode)
 integer_types = (int, long)
 class_types = (type, types.ClassType)
 text_type = unicode
diff --git a/typeshed/third_party/2and3/pymysql/__init__.pyi b/typeshed/third_party/2and3/pymysql/__init__.pyi
new file mode 100644
index 0000000..d1161e8
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/__init__.pyi
@@ -0,0 +1,39 @@
+# Stubs for pymysql (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Tuple, Callable
+from .connections import Connection
+from .constants import FIELD_TYPE as FIELD_TYPE
+from .converters import escape_dict as escape_dict, escape_sequence as escape_sequence, escape_string as escape_string
+from .err import Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError, MySQLError as MySQLError
+from .times import Date as Date, Time as Time, Timestamp as Timestamp, DateFromTicks as DateFromTicks, TimeFromTicks as TimeFromTicks, TimestampFromTicks as TimestampFromTicks
+
+threadsafety = ...  # type: int
+apilevel = ...  # type: str
+paramstyle = ...  # type: str
+
+class DBAPISet(frozenset):
+    def __ne__(self, other) -> bool: ...
+    def __eq__(self, other) -> bool: ...
+    def __hash__(self) -> int: ...
+
+STRING = ...  # type: DBAPISet
+BINARY = ...  # type: DBAPISet
+NUMBER = ...  # type: DBAPISet
+DATE = ...  # type: DBAPISet
+TIME = ...  # type: DBAPISet
+TIMESTAMP = ...  # type: DBAPISet
+ROWID = ...  # type: DBAPISet
+
+def Binary(x) -> Union[bytearray, bytes]: ...
+def Connect(*args, **kwargs) -> Connection: ...
+def get_client_info() -> str: ...
+
+connect = ...  # type: Callable[..., Connection]
+
+
+version_info = ...  # type: Tuple[int, int, int, str, int]
+NULL = ...  # type: str
+
+def install_as_MySQLdb() -> None: ...
diff --git a/typeshed/third_party/2and3/pymysql/charset.pyi b/typeshed/third_party/2and3/pymysql/charset.pyi
new file mode 100644
index 0000000..4fc0df5
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/charset.pyi
@@ -0,0 +1,20 @@
+# Stubs for pymysql.charset (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+MBLENGTH = ...  # type: Any
+
+class Charset:
+    is_default = ...  # type: Any
+    def __init__(self, id, name, collation, is_default): ...
+
+class Charsets:
+    def __init__(self): ...
+    def add(self, c): ...
+    def by_id(self, id): ...
+    def by_name(self, name): ...
+
+def charset_by_name(name): ...
+def charset_by_id(id): ...
diff --git a/typeshed/third_party/2and3/pymysql/connections.pyi b/typeshed/third_party/2and3/pymysql/connections.pyi
new file mode 100644
index 0000000..2eff7be
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/connections.pyi
@@ -0,0 +1,142 @@
+# Stubs for pymysql.connections (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Optional, Type
+from .charset import MBLENGTH as MBLENGTH, charset_by_name as charset_by_name, charset_by_id as charset_by_id
+from .cursors import Cursor as Cursor
+from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG
+from .constants import SERVER_STATUS as SERVER_STATUS
+from .constants import CLIENT as CLIENT
+from .constants import COMMAND as COMMAND
+from .util import join_bytes as join_bytes, byte2int as byte2int, int2byte as int2byte
+from .converters import escape_item as escape_item, encoders as encoders, decoders as decoders
+from .err import raise_mysql_exception as raise_mysql_exception, Warning as Warning, Error as Error, InterfaceError as InterfaceError, DataError as DataError, DatabaseError as DatabaseError, OperationalError as OperationalError, IntegrityError as IntegrityError, InternalError as InternalError, NotSupportedError as NotSupportedError, ProgrammingError as ProgrammingError
+
+sha_new = ...  # type: Any
+SSL_ENABLED = ...  # type: Any
+DEFAULT_USER = ...  # type: Any
+DEBUG = ...  # type: Any
+NULL_COLUMN = ...  # type: Any
+UNSIGNED_CHAR_COLUMN = ...  # type: Any
+UNSIGNED_SHORT_COLUMN = ...  # type: Any
+UNSIGNED_INT24_COLUMN = ...  # type: Any
+UNSIGNED_INT64_COLUMN = ...  # type: Any
+UNSIGNED_CHAR_LENGTH = ...  # type: Any
+UNSIGNED_SHORT_LENGTH = ...  # type: Any
+UNSIGNED_INT24_LENGTH = ...  # type: Any
+UNSIGNED_INT64_LENGTH = ...  # type: Any
+DEFAULT_CHARSET = ...  # type: Any
+
+def dump_packet(data): ...
+
+SCRAMBLE_LENGTH_323 = ...  # type: Any
+
+class RandStruct_323:
+    max_value = ...  # type: Any
+    seed1 = ...  # type: Any
+    seed2 = ...  # type: Any
+    def __init__(self, seed1, seed2): ...
+    def my_rnd(self): ...
+
+def pack_int24(n): ...
+def unpack_uint16(n): ...
+def unpack_int24(n): ...
+def unpack_int32(n): ...
+def unpack_int64(n): ...
+def defaulterrorhandler(connection, cursor, errorclass, errorvalue): ...
+
+class MysqlPacket:
+    connection = ...  # type: Any
+    def __init__(self, connection): ...
+    def packet_number(self): ...
+    def get_all_data(self): ...
+    def read(self, size): ...
+    def read_all(self): ...
+    def advance(self, length): ...
+    def rewind(self, position=0): ...
+    def peek(self, size): ...
+    def get_bytes(self, position, length=1): ...
+    def read_length_coded_binary(self): ...
+    def read_length_coded_string(self): ...
+    def is_ok_packet(self): ...
+    def is_eof_packet(self): ...
+    def is_resultset_packet(self): ...
+    def is_error_packet(self): ...
+    def check_error(self): ...
+    def dump(self): ...
+
+class FieldDescriptorPacket(MysqlPacket):
+    def __init__(self, *args): ...
+    def description(self): ...
+    def get_column_length(self): ...
+
+class Connection:
+    errorhandler = ...  # type: Any
+    ssl = ...  # type: Any
+    host = ...  # type: Any
+    port = ...  # type: Any
+    user = ...  # type: Any
+    password = ...  # type: Any
+    db = ...  # type: Any
+    unix_socket = ...  # type: Any
+    charset = ...  # type: Any
+    use_unicode = ...  # type: Any
+    client_flag = ...  # type: Any
+    cursorclass = ...  # type: Any
+    connect_timeout = ...  # type: Any
+    messages = ...  # type: Any
+    encoders = ...  # type: Any
+    decoders = ...  # type: Any
+    host_info = ...  # type: Any
+    def __init__(self, host='', user=None, passwd='', db=None, port=3306, unix_socket=None, charset='', sql_mode=None, read_default_file=None, conv=..., use_unicode=None, client_flag=0, cursorclass=..., init_command=None, connect_timeout=None, ssl=None, read_default_group=None, compress=None, named_pipe=None): ...
+    socket = ...  # type: Any
+    rfile = ...  # type: Any
+    wfile = ...  # type: Any
+    def close(self): ...
+    def autocommit(self, value): ...
+    def commit(self): ...
+    def rollback(self): ...
+    def escape(self, obj): ...
+    def literal(self, obj): ...
+    def cursor(self, cursor: Optional[Type[Cursor]]=None): ...
+    def __enter__(self): ...
+    def __exit__(self, exc, value, traceback): ...
+    def query(self, sql): ...
+    def next_result(self): ...
+    def affected_rows(self): ...
+    def kill(self, thread_id): ...
+    def ping(self, reconnect=True): ...
+    def set_charset(self, charset): ...
+    def read_packet(self, packet_type=...): ...
+    def insert_id(self): ...
+    def thread_id(self): ...
+    def character_set_name(self): ...
+    def get_host_info(self): ...
+    def get_proto_info(self): ...
+    def get_server_info(self): ...
+    Warning = ...  # type: Any
+    Error = ...  # type: Any
+    InterfaceError = ...  # type: Any
+    DatabaseError = ...  # type: Any
+    DataError = ...  # type: Any
+    OperationalError = ...  # type: Any
+    IntegrityError = ...  # type: Any
+    InternalError = ...  # type: Any
+    ProgrammingError = ...  # type: Any
+    NotSupportedError = ...  # type: Any
+
+class MySQLResult:
+    connection = ...  # type: Any
+    affected_rows = ...  # type: Any
+    insert_id = ...  # type: Any
+    server_status = ...  # type: Any
+    warning_count = ...  # type: Any
+    message = ...  # type: Any
+    field_count = ...  # type: Any
+    description = ...  # type: Any
+    rows = ...  # type: Any
+    has_next = ...  # type: Any
+    def __init__(self, connection): ...
+    first_packet = ...  # type: Any
+    def read(self): ...
diff --git a/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi b/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi
new file mode 100644
index 0000000..30d9dfc
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/CLIENT.pyi
@@ -0,0 +1,24 @@
+# Stubs for pymysql.constants.CLIENT (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+LONG_PASSWORD = ...  # type: Any
+FOUND_ROWS = ...  # type: Any
+LONG_FLAG = ...  # type: Any
+CONNECT_WITH_DB = ...  # type: Any
+NO_SCHEMA = ...  # type: Any
+COMPRESS = ...  # type: Any
+ODBC = ...  # type: Any
+LOCAL_FILES = ...  # type: Any
+IGNORE_SPACE = ...  # type: Any
+PROTOCOL_41 = ...  # type: Any
+INTERACTIVE = ...  # type: Any
+SSL = ...  # type: Any
+IGNORE_SIGPIPE = ...  # type: Any
+TRANSACTIONS = ...  # type: Any
+SECURE_CONNECTION = ...  # type: Any
+MULTI_STATEMENTS = ...  # type: Any
+MULTI_RESULTS = ...  # type: Any
+CAPABILITIES = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi b/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi
new file mode 100644
index 0000000..4ec68d5
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/COMMAND.pyi
@@ -0,0 +1,28 @@
+# Stubs for pymysql.constants.COMMAND (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+COM_SLEEP = ...  # type: Any
+COM_QUIT = ...  # type: Any
+COM_INIT_DB = ...  # type: Any
+COM_QUERY = ...  # type: Any
+COM_FIELD_LIST = ...  # type: Any
+COM_CREATE_DB = ...  # type: Any
+COM_DROP_DB = ...  # type: Any
+COM_REFRESH = ...  # type: Any
+COM_SHUTDOWN = ...  # type: Any
+COM_STATISTICS = ...  # type: Any
+COM_PROCESS_INFO = ...  # type: Any
+COM_CONNECT = ...  # type: Any
+COM_PROCESS_KILL = ...  # type: Any
+COM_DEBUG = ...  # type: Any
+COM_PING = ...  # type: Any
+COM_TIME = ...  # type: Any
+COM_DELAYED_INSERT = ...  # type: Any
+COM_CHANGE_USER = ...  # type: Any
+COM_BINLOG_DUMP = ...  # type: Any
+COM_TABLE_DUMP = ...  # type: Any
+COM_CONNECT_OUT = ...  # type: Any
+COM_REGISTER_SLAVE = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/ER.pyi b/typeshed/third_party/2and3/pymysql/constants/ER.pyi
new file mode 100644
index 0000000..0ab17b2
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/ER.pyi
@@ -0,0 +1,477 @@
+# Stubs for pymysql.constants.ER (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+ERROR_FIRST = ...  # type: Any
+HASHCHK = ...  # type: Any
+NISAMCHK = ...  # type: Any
+NO = ...  # type: Any
+YES = ...  # type: Any
+CANT_CREATE_FILE = ...  # type: Any
+CANT_CREATE_TABLE = ...  # type: Any
+CANT_CREATE_DB = ...  # type: Any
+DB_CREATE_EXISTS = ...  # type: Any
+DB_DROP_EXISTS = ...  # type: Any
+DB_DROP_DELETE = ...  # type: Any
+DB_DROP_RMDIR = ...  # type: Any
+CANT_DELETE_FILE = ...  # type: Any
+CANT_FIND_SYSTEM_REC = ...  # type: Any
+CANT_GET_STAT = ...  # type: Any
+CANT_GET_WD = ...  # type: Any
+CANT_LOCK = ...  # type: Any
+CANT_OPEN_FILE = ...  # type: Any
+FILE_NOT_FOUND = ...  # type: Any
+CANT_READ_DIR = ...  # type: Any
+CANT_SET_WD = ...  # type: Any
+CHECKREAD = ...  # type: Any
+DISK_FULL = ...  # type: Any
+DUP_KEY = ...  # type: Any
+ERROR_ON_CLOSE = ...  # type: Any
+ERROR_ON_READ = ...  # type: Any
+ERROR_ON_RENAME = ...  # type: Any
+ERROR_ON_WRITE = ...  # type: Any
+FILE_USED = ...  # type: Any
+FILSORT_ABORT = ...  # type: Any
+FORM_NOT_FOUND = ...  # type: Any
+GET_ERRNO = ...  # type: Any
+ILLEGAL_HA = ...  # type: Any
+KEY_NOT_FOUND = ...  # type: Any
+NOT_FORM_FILE = ...  # type: Any
+NOT_KEYFILE = ...  # type: Any
+OLD_KEYFILE = ...  # type: Any
+OPEN_AS_READONLY = ...  # type: Any
+OUTOFMEMORY = ...  # type: Any
+OUT_OF_SORTMEMORY = ...  # type: Any
+UNEXPECTED_EOF = ...  # type: Any
+CON_COUNT_ERROR = ...  # type: Any
+OUT_OF_RESOURCES = ...  # type: Any
+BAD_HOST_ERROR = ...  # type: Any
+HANDSHAKE_ERROR = ...  # type: Any
+DBACCESS_DENIED_ERROR = ...  # type: Any
+ACCESS_DENIED_ERROR = ...  # type: Any
+NO_DB_ERROR = ...  # type: Any
+UNKNOWN_COM_ERROR = ...  # type: Any
+BAD_NULL_ERROR = ...  # type: Any
+BAD_DB_ERROR = ...  # type: Any
+TABLE_EXISTS_ERROR = ...  # type: Any
+BAD_TABLE_ERROR = ...  # type: Any
+NON_UNIQ_ERROR = ...  # type: Any
+SERVER_SHUTDOWN = ...  # type: Any
+BAD_FIELD_ERROR = ...  # type: Any
+WRONG_FIELD_WITH_GROUP = ...  # type: Any
+WRONG_GROUP_FIELD = ...  # type: Any
+WRONG_SUM_SELECT = ...  # type: Any
+WRONG_VALUE_COUNT = ...  # type: Any
+TOO_LONG_IDENT = ...  # type: Any
+DUP_FIELDNAME = ...  # type: Any
+DUP_KEYNAME = ...  # type: Any
+DUP_ENTRY = ...  # type: Any
+WRONG_FIELD_SPEC = ...  # type: Any
+PARSE_ERROR = ...  # type: Any
+EMPTY_QUERY = ...  # type: Any
+NONUNIQ_TABLE = ...  # type: Any
+INVALID_DEFAULT = ...  # type: Any
+MULTIPLE_PRI_KEY = ...  # type: Any
+TOO_MANY_KEYS = ...  # type: Any
+TOO_MANY_KEY_PARTS = ...  # type: Any
+TOO_LONG_KEY = ...  # type: Any
+KEY_COLUMN_DOES_NOT_EXITS = ...  # type: Any
+BLOB_USED_AS_KEY = ...  # type: Any
+TOO_BIG_FIELDLENGTH = ...  # type: Any
+WRONG_AUTO_KEY = ...  # type: Any
+READY = ...  # type: Any
+NORMAL_SHUTDOWN = ...  # type: Any
+GOT_SIGNAL = ...  # type: Any
+SHUTDOWN_COMPLETE = ...  # type: Any
+FORCING_CLOSE = ...  # type: Any
+IPSOCK_ERROR = ...  # type: Any
+NO_SUCH_INDEX = ...  # type: Any
+WRONG_FIELD_TERMINATORS = ...  # type: Any
+BLOBS_AND_NO_TERMINATED = ...  # type: Any
+TEXTFILE_NOT_READABLE = ...  # type: Any
+FILE_EXISTS_ERROR = ...  # type: Any
+LOAD_INFO = ...  # type: Any
+ALTER_INFO = ...  # type: Any
+WRONG_SUB_KEY = ...  # type: Any
+CANT_REMOVE_ALL_FIELDS = ...  # type: Any
+CANT_DROP_FIELD_OR_KEY = ...  # type: Any
+INSERT_INFO = ...  # type: Any
+UPDATE_TABLE_USED = ...  # type: Any
+NO_SUCH_THREAD = ...  # type: Any
+KILL_DENIED_ERROR = ...  # type: Any
+NO_TABLES_USED = ...  # type: Any
+TOO_BIG_SET = ...  # type: Any
+NO_UNIQUE_LOGFILE = ...  # type: Any
+TABLE_NOT_LOCKED_FOR_WRITE = ...  # type: Any
+TABLE_NOT_LOCKED = ...  # type: Any
+BLOB_CANT_HAVE_DEFAULT = ...  # type: Any
+WRONG_DB_NAME = ...  # type: Any
+WRONG_TABLE_NAME = ...  # type: Any
+TOO_BIG_SELECT = ...  # type: Any
+UNKNOWN_ERROR = ...  # type: Any
+UNKNOWN_PROCEDURE = ...  # type: Any
+WRONG_PARAMCOUNT_TO_PROCEDURE = ...  # type: Any
+WRONG_PARAMETERS_TO_PROCEDURE = ...  # type: Any
+UNKNOWN_TABLE = ...  # type: Any
+FIELD_SPECIFIED_TWICE = ...  # type: Any
+INVALID_GROUP_FUNC_USE = ...  # type: Any
+UNSUPPORTED_EXTENSION = ...  # type: Any
+TABLE_MUST_HAVE_COLUMNS = ...  # type: Any
+RECORD_FILE_FULL = ...  # type: Any
+UNKNOWN_CHARACTER_SET = ...  # type: Any
+TOO_MANY_TABLES = ...  # type: Any
+TOO_MANY_FIELDS = ...  # type: Any
+TOO_BIG_ROWSIZE = ...  # type: Any
+STACK_OVERRUN = ...  # type: Any
+WRONG_OUTER_JOIN = ...  # type: Any
+NULL_COLUMN_IN_INDEX = ...  # type: Any
+CANT_FIND_UDF = ...  # type: Any
+CANT_INITIALIZE_UDF = ...  # type: Any
+UDF_NO_PATHS = ...  # type: Any
+UDF_EXISTS = ...  # type: Any
+CANT_OPEN_LIBRARY = ...  # type: Any
+CANT_FIND_DL_ENTRY = ...  # type: Any
+FUNCTION_NOT_DEFINED = ...  # type: Any
+HOST_IS_BLOCKED = ...  # type: Any
+HOST_NOT_PRIVILEGED = ...  # type: Any
+PASSWORD_ANONYMOUS_USER = ...  # type: Any
+PASSWORD_NOT_ALLOWED = ...  # type: Any
+PASSWORD_NO_MATCH = ...  # type: Any
+UPDATE_INFO = ...  # type: Any
+CANT_CREATE_THREAD = ...  # type: Any
+WRONG_VALUE_COUNT_ON_ROW = ...  # type: Any
+CANT_REOPEN_TABLE = ...  # type: Any
+INVALID_USE_OF_NULL = ...  # type: Any
+REGEXP_ERROR = ...  # type: Any
+MIX_OF_GROUP_FUNC_AND_FIELDS = ...  # type: Any
+NONEXISTING_GRANT = ...  # type: Any
+TABLEACCESS_DENIED_ERROR = ...  # type: Any
+COLUMNACCESS_DENIED_ERROR = ...  # type: Any
+ILLEGAL_GRANT_FOR_TABLE = ...  # type: Any
+GRANT_WRONG_HOST_OR_USER = ...  # type: Any
+NO_SUCH_TABLE = ...  # type: Any
+NONEXISTING_TABLE_GRANT = ...  # type: Any
+NOT_ALLOWED_COMMAND = ...  # type: Any
+SYNTAX_ERROR = ...  # type: Any
+DELAYED_CANT_CHANGE_LOCK = ...  # type: Any
+TOO_MANY_DELAYED_THREADS = ...  # type: Any
+ABORTING_CONNECTION = ...  # type: Any
+NET_PACKET_TOO_LARGE = ...  # type: Any
+NET_READ_ERROR_FROM_PIPE = ...  # type: Any
+NET_FCNTL_ERROR = ...  # type: Any
+NET_PACKETS_OUT_OF_ORDER = ...  # type: Any
+NET_UNCOMPRESS_ERROR = ...  # type: Any
+NET_READ_ERROR = ...  # type: Any
+NET_READ_INTERRUPTED = ...  # type: Any
+NET_ERROR_ON_WRITE = ...  # type: Any
+NET_WRITE_INTERRUPTED = ...  # type: Any
+TOO_LONG_STRING = ...  # type: Any
+TABLE_CANT_HANDLE_BLOB = ...  # type: Any
+TABLE_CANT_HANDLE_AUTO_INCREMENT = ...  # type: Any
+DELAYED_INSERT_TABLE_LOCKED = ...  # type: Any
+WRONG_COLUMN_NAME = ...  # type: Any
+WRONG_KEY_COLUMN = ...  # type: Any
+WRONG_MRG_TABLE = ...  # type: Any
+DUP_UNIQUE = ...  # type: Any
+BLOB_KEY_WITHOUT_LENGTH = ...  # type: Any
+PRIMARY_CANT_HAVE_NULL = ...  # type: Any
+TOO_MANY_ROWS = ...  # type: Any
+REQUIRES_PRIMARY_KEY = ...  # type: Any
+NO_RAID_COMPILED = ...  # type: Any
+UPDATE_WITHOUT_KEY_IN_SAFE_MODE = ...  # type: Any
+KEY_DOES_NOT_EXITS = ...  # type: Any
+CHECK_NO_SUCH_TABLE = ...  # type: Any
+CHECK_NOT_IMPLEMENTED = ...  # type: Any
+CANT_DO_THIS_DURING_AN_TRANSACTION = ...  # type: Any
+ERROR_DURING_COMMIT = ...  # type: Any
+ERROR_DURING_ROLLBACK = ...  # type: Any
+ERROR_DURING_FLUSH_LOGS = ...  # type: Any
+ERROR_DURING_CHECKPOINT = ...  # type: Any
+NEW_ABORTING_CONNECTION = ...  # type: Any
+DUMP_NOT_IMPLEMENTED = ...  # type: Any
+FLUSH_MASTER_BINLOG_CLOSED = ...  # type: Any
+INDEX_REBUILD = ...  # type: Any
+MASTER = ...  # type: Any
+MASTER_NET_READ = ...  # type: Any
+MASTER_NET_WRITE = ...  # type: Any
+FT_MATCHING_KEY_NOT_FOUND = ...  # type: Any
+LOCK_OR_ACTIVE_TRANSACTION = ...  # type: Any
+UNKNOWN_SYSTEM_VARIABLE = ...  # type: Any
+CRASHED_ON_USAGE = ...  # type: Any
+CRASHED_ON_REPAIR = ...  # type: Any
+WARNING_NOT_COMPLETE_ROLLBACK = ...  # type: Any
+TRANS_CACHE_FULL = ...  # type: Any
+SLAVE_MUST_STOP = ...  # type: Any
+SLAVE_NOT_RUNNING = ...  # type: Any
+BAD_SLAVE = ...  # type: Any
+MASTER_INFO = ...  # type: Any
+SLAVE_THREAD = ...  # type: Any
+TOO_MANY_USER_CONNECTIONS = ...  # type: Any
+SET_CONSTANTS_ONLY = ...  # type: Any
+LOCK_WAIT_TIMEOUT = ...  # type: Any
+LOCK_TABLE_FULL = ...  # type: Any
+READ_ONLY_TRANSACTION = ...  # type: Any
+DROP_DB_WITH_READ_LOCK = ...  # type: Any
+CREATE_DB_WITH_READ_LOCK = ...  # type: Any
+WRONG_ARGUMENTS = ...  # type: Any
+NO_PERMISSION_TO_CREATE_USER = ...  # type: Any
+UNION_TABLES_IN_DIFFERENT_DIR = ...  # type: Any
+LOCK_DEADLOCK = ...  # type: Any
+TABLE_CANT_HANDLE_FT = ...  # type: Any
+CANNOT_ADD_FOREIGN = ...  # type: Any
+NO_REFERENCED_ROW = ...  # type: Any
+ROW_IS_REFERENCED = ...  # type: Any
+CONNECT_TO_MASTER = ...  # type: Any
+QUERY_ON_MASTER = ...  # type: Any
+ERROR_WHEN_EXECUTING_COMMAND = ...  # type: Any
+WRONG_USAGE = ...  # type: Any
+WRONG_NUMBER_OF_COLUMNS_IN_SELECT = ...  # type: Any
+CANT_UPDATE_WITH_READLOCK = ...  # type: Any
+MIXING_NOT_ALLOWED = ...  # type: Any
+DUP_ARGUMENT = ...  # type: Any
+USER_LIMIT_REACHED = ...  # type: Any
+SPECIFIC_ACCESS_DENIED_ERROR = ...  # type: Any
+LOCAL_VARIABLE = ...  # type: Any
+GLOBAL_VARIABLE = ...  # type: Any
+NO_DEFAULT = ...  # type: Any
+WRONG_VALUE_FOR_VAR = ...  # type: Any
+WRONG_TYPE_FOR_VAR = ...  # type: Any
+VAR_CANT_BE_READ = ...  # type: Any
+CANT_USE_OPTION_HERE = ...  # type: Any
+NOT_SUPPORTED_YET = ...  # type: Any
+MASTER_FATAL_ERROR_READING_BINLOG = ...  # type: Any
+SLAVE_IGNORED_TABLE = ...  # type: Any
+INCORRECT_GLOBAL_LOCAL_VAR = ...  # type: Any
+WRONG_FK_DEF = ...  # type: Any
+KEY_REF_DO_NOT_MATCH_TABLE_REF = ...  # type: Any
+OPERAND_COLUMNS = ...  # type: Any
+SUBQUERY_NO_1_ROW = ...  # type: Any
+UNKNOWN_STMT_HANDLER = ...  # type: Any
+CORRUPT_HELP_DB = ...  # type: Any
+CYCLIC_REFERENCE = ...  # type: Any
+AUTO_CONVERT = ...  # type: Any
+ILLEGAL_REFERENCE = ...  # type: Any
+DERIVED_MUST_HAVE_ALIAS = ...  # type: Any
+SELECT_REDUCED = ...  # type: Any
+TABLENAME_NOT_ALLOWED_HERE = ...  # type: Any
+NOT_SUPPORTED_AUTH_MODE = ...  # type: Any
+SPATIAL_CANT_HAVE_NULL = ...  # type: Any
+COLLATION_CHARSET_MISMATCH = ...  # type: Any
+SLAVE_WAS_RUNNING = ...  # type: Any
+SLAVE_WAS_NOT_RUNNING = ...  # type: Any
+TOO_BIG_FOR_UNCOMPRESS = ...  # type: Any
+ZLIB_Z_MEM_ERROR = ...  # type: Any
+ZLIB_Z_BUF_ERROR = ...  # type: Any
+ZLIB_Z_DATA_ERROR = ...  # type: Any
+CUT_VALUE_GROUP_CONCAT = ...  # type: Any
+WARN_TOO_FEW_RECORDS = ...  # type: Any
+WARN_TOO_MANY_RECORDS = ...  # type: Any
+WARN_NULL_TO_NOTNULL = ...  # type: Any
+WARN_DATA_OUT_OF_RANGE = ...  # type: Any
+WARN_DATA_TRUNCATED = ...  # type: Any
+WARN_USING_OTHER_HANDLER = ...  # type: Any
+CANT_AGGREGATE_2COLLATIONS = ...  # type: Any
+DROP_USER = ...  # type: Any
+REVOKE_GRANTS = ...  # type: Any
+CANT_AGGREGATE_3COLLATIONS = ...  # type: Any
+CANT_AGGREGATE_NCOLLATIONS = ...  # type: Any
+VARIABLE_IS_NOT_STRUCT = ...  # type: Any
+UNKNOWN_COLLATION = ...  # type: Any
+SLAVE_IGNORED_SSL_PARAMS = ...  # type: Any
+SERVER_IS_IN_SECURE_AUTH_MODE = ...  # type: Any
+WARN_FIELD_RESOLVED = ...  # type: Any
+BAD_SLAVE_UNTIL_COND = ...  # type: Any
+MISSING_SKIP_SLAVE = ...  # type: Any
+UNTIL_COND_IGNORED = ...  # type: Any
+WRONG_NAME_FOR_INDEX = ...  # type: Any
+WRONG_NAME_FOR_CATALOG = ...  # type: Any
+WARN_QC_RESIZE = ...  # type: Any
+BAD_FT_COLUMN = ...  # type: Any
+UNKNOWN_KEY_CACHE = ...  # type: Any
+WARN_HOSTNAME_WONT_WORK = ...  # type: Any
+UNKNOWN_STORAGE_ENGINE = ...  # type: Any
+WARN_DEPRECATED_SYNTAX = ...  # type: Any
+NON_UPDATABLE_TABLE = ...  # type: Any
+FEATURE_DISABLED = ...  # type: Any
+OPTION_PREVENTS_STATEMENT = ...  # type: Any
+DUPLICATED_VALUE_IN_TYPE = ...  # type: Any
+TRUNCATED_WRONG_VALUE = ...  # type: Any
+TOO_MUCH_AUTO_TIMESTAMP_COLS = ...  # type: Any
+INVALID_ON_UPDATE = ...  # type: Any
+UNSUPPORTED_PS = ...  # type: Any
+GET_ERRMSG = ...  # type: Any
+GET_TEMPORARY_ERRMSG = ...  # type: Any
+UNKNOWN_TIME_ZONE = ...  # type: Any
+WARN_INVALID_TIMESTAMP = ...  # type: Any
+INVALID_CHARACTER_STRING = ...  # type: Any
+WARN_ALLOWED_PACKET_OVERFLOWED = ...  # type: Any
+CONFLICTING_DECLARATIONS = ...  # type: Any
+SP_NO_RECURSIVE_CREATE = ...  # type: Any
+SP_ALREADY_EXISTS = ...  # type: Any
+SP_DOES_NOT_EXIST = ...  # type: Any
+SP_DROP_FAILED = ...  # type: Any
+SP_STORE_FAILED = ...  # type: Any
+SP_LILABEL_MISMATCH = ...  # type: Any
+SP_LABEL_REDEFINE = ...  # type: Any
+SP_LABEL_MISMATCH = ...  # type: Any
+SP_UNINIT_VAR = ...  # type: Any
+SP_BADSELECT = ...  # type: Any
+SP_BADRETURN = ...  # type: Any
+SP_BADSTATEMENT = ...  # type: Any
+UPDATE_LOG_DEPRECATED_IGNORED = ...  # type: Any
+UPDATE_LOG_DEPRECATED_TRANSLATED = ...  # type: Any
+QUERY_INTERRUPTED = ...  # type: Any
+SP_WRONG_NO_OF_ARGS = ...  # type: Any
+SP_COND_MISMATCH = ...  # type: Any
+SP_NORETURN = ...  # type: Any
+SP_NORETURNEND = ...  # type: Any
+SP_BAD_CURSOR_QUERY = ...  # type: Any
+SP_BAD_CURSOR_SELECT = ...  # type: Any
+SP_CURSOR_MISMATCH = ...  # type: Any
+SP_CURSOR_ALREADY_OPEN = ...  # type: Any
+SP_CURSOR_NOT_OPEN = ...  # type: Any
+SP_UNDECLARED_VAR = ...  # type: Any
+SP_WRONG_NO_OF_FETCH_ARGS = ...  # type: Any
+SP_FETCH_NO_DATA = ...  # type: Any
+SP_DUP_PARAM = ...  # type: Any
+SP_DUP_VAR = ...  # type: Any
+SP_DUP_COND = ...  # type: Any
+SP_DUP_CURS = ...  # type: Any
+SP_CANT_ALTER = ...  # type: Any
+SP_SUBSELECT_NYI = ...  # type: Any
+STMT_NOT_ALLOWED_IN_SF_OR_TRG = ...  # type: Any
+SP_VARCOND_AFTER_CURSHNDLR = ...  # type: Any
+SP_CURSOR_AFTER_HANDLER = ...  # type: Any
+SP_CASE_NOT_FOUND = ...  # type: Any
+FPARSER_TOO_BIG_FILE = ...  # type: Any
+FPARSER_BAD_HEADER = ...  # type: Any
+FPARSER_EOF_IN_COMMENT = ...  # type: Any
+FPARSER_ERROR_IN_PARAMETER = ...  # type: Any
+FPARSER_EOF_IN_UNKNOWN_PARAMETER = ...  # type: Any
+VIEW_NO_EXPLAIN = ...  # type: Any
+FRM_UNKNOWN_TYPE = ...  # type: Any
+WRONG_OBJECT = ...  # type: Any
+NONUPDATEABLE_COLUMN = ...  # type: Any
+VIEW_SELECT_DERIVED = ...  # type: Any
+VIEW_SELECT_CLAUSE = ...  # type: Any
+VIEW_SELECT_VARIABLE = ...  # type: Any
+VIEW_SELECT_TMPTABLE = ...  # type: Any
+VIEW_WRONG_LIST = ...  # type: Any
+WARN_VIEW_MERGE = ...  # type: Any
+WARN_VIEW_WITHOUT_KEY = ...  # type: Any
+VIEW_INVALID = ...  # type: Any
+SP_NO_DROP_SP = ...  # type: Any
+SP_GOTO_IN_HNDLR = ...  # type: Any
+TRG_ALREADY_EXISTS = ...  # type: Any
+TRG_DOES_NOT_EXIST = ...  # type: Any
+TRG_ON_VIEW_OR_TEMP_TABLE = ...  # type: Any
+TRG_CANT_CHANGE_ROW = ...  # type: Any
+TRG_NO_SUCH_ROW_IN_TRG = ...  # type: Any
+NO_DEFAULT_FOR_FIELD = ...  # type: Any
+DIVISION_BY_ZERO = ...  # type: Any
+TRUNCATED_WRONG_VALUE_FOR_FIELD = ...  # type: Any
+ILLEGAL_VALUE_FOR_TYPE = ...  # type: Any
+VIEW_NONUPD_CHECK = ...  # type: Any
+VIEW_CHECK_FAILED = ...  # type: Any
+PROCACCESS_DENIED_ERROR = ...  # type: Any
+RELAY_LOG_FAIL = ...  # type: Any
+PASSWD_LENGTH = ...  # type: Any
+UNKNOWN_TARGET_BINLOG = ...  # type: Any
+IO_ERR_LOG_INDEX_READ = ...  # type: Any
+BINLOG_PURGE_PROHIBITED = ...  # type: Any
+FSEEK_FAIL = ...  # type: Any
+BINLOG_PURGE_FATAL_ERR = ...  # type: Any
+LOG_IN_USE = ...  # type: Any
+LOG_PURGE_UNKNOWN_ERR = ...  # type: Any
+RELAY_LOG_INIT = ...  # type: Any
+NO_BINARY_LOGGING = ...  # type: Any
+RESERVED_SYNTAX = ...  # type: Any
+WSAS_FAILED = ...  # type: Any
+DIFF_GROUPS_PROC = ...  # type: Any
+NO_GROUP_FOR_PROC = ...  # type: Any
+ORDER_WITH_PROC = ...  # type: Any
+LOGGING_PROHIBIT_CHANGING_OF = ...  # type: Any
+NO_FILE_MAPPING = ...  # type: Any
+WRONG_MAGIC = ...  # type: Any
+PS_MANY_PARAM = ...  # type: Any
+KEY_PART_0 = ...  # type: Any
+VIEW_CHECKSUM = ...  # type: Any
+VIEW_MULTIUPDATE = ...  # type: Any
+VIEW_NO_INSERT_FIELD_LIST = ...  # type: Any
+VIEW_DELETE_MERGE_VIEW = ...  # type: Any
+CANNOT_USER = ...  # type: Any
+XAER_NOTA = ...  # type: Any
+XAER_INVAL = ...  # type: Any
+XAER_RMFAIL = ...  # type: Any
+XAER_OUTSIDE = ...  # type: Any
+XAER_RMERR = ...  # type: Any
+XA_RBROLLBACK = ...  # type: Any
+NONEXISTING_PROC_GRANT = ...  # type: Any
+PROC_AUTO_GRANT_FAIL = ...  # type: Any
+PROC_AUTO_REVOKE_FAIL = ...  # type: Any
+DATA_TOO_LONG = ...  # type: Any
+SP_BAD_SQLSTATE = ...  # type: Any
+STARTUP = ...  # type: Any
+LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = ...  # type: Any
+CANT_CREATE_USER_WITH_GRANT = ...  # type: Any
+WRONG_VALUE_FOR_TYPE = ...  # type: Any
+TABLE_DEF_CHANGED = ...  # type: Any
+SP_DUP_HANDLER = ...  # type: Any
+SP_NOT_VAR_ARG = ...  # type: Any
+SP_NO_RETSET = ...  # type: Any
+CANT_CREATE_GEOMETRY_OBJECT = ...  # type: Any
+FAILED_ROUTINE_BREAK_BINLOG = ...  # type: Any
+BINLOG_UNSAFE_ROUTINE = ...  # type: Any
+BINLOG_CREATE_ROUTINE_NEED_SUPER = ...  # type: Any
+EXEC_STMT_WITH_OPEN_CURSOR = ...  # type: Any
+STMT_HAS_NO_OPEN_CURSOR = ...  # type: Any
+COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = ...  # type: Any
+NO_DEFAULT_FOR_VIEW_FIELD = ...  # type: Any
+SP_NO_RECURSION = ...  # type: Any
+TOO_BIG_SCALE = ...  # type: Any
+TOO_BIG_PRECISION = ...  # type: Any
+M_BIGGER_THAN_D = ...  # type: Any
+WRONG_LOCK_OF_SYSTEM_TABLE = ...  # type: Any
+CONNECT_TO_FOREIGN_DATA_SOURCE = ...  # type: Any
+QUERY_ON_FOREIGN_DATA_SOURCE = ...  # type: Any
+FOREIGN_DATA_SOURCE_DOESNT_EXIST = ...  # type: Any
+FOREIGN_DATA_STRING_INVALID_CANT_CREATE = ...  # type: Any
+FOREIGN_DATA_STRING_INVALID = ...  # type: Any
+CANT_CREATE_FEDERATED_TABLE = ...  # type: Any
+TRG_IN_WRONG_SCHEMA = ...  # type: Any
+STACK_OVERRUN_NEED_MORE = ...  # type: Any
+TOO_LONG_BODY = ...  # type: Any
+WARN_CANT_DROP_DEFAULT_KEYCACHE = ...  # type: Any
+TOO_BIG_DISPLAYWIDTH = ...  # type: Any
+XAER_DUPID = ...  # type: Any
+DATETIME_FUNCTION_OVERFLOW = ...  # type: Any
+CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = ...  # type: Any
+VIEW_PREVENT_UPDATE = ...  # type: Any
+PS_NO_RECURSION = ...  # type: Any
+SP_CANT_SET_AUTOCOMMIT = ...  # type: Any
+MALFORMED_DEFINER = ...  # type: Any
+VIEW_FRM_NO_USER = ...  # type: Any
+VIEW_OTHER_USER = ...  # type: Any
+NO_SUCH_USER = ...  # type: Any
+FORBID_SCHEMA_CHANGE = ...  # type: Any
+ROW_IS_REFERENCED_2 = ...  # type: Any
+NO_REFERENCED_ROW_2 = ...  # type: Any
+SP_BAD_VAR_SHADOW = ...  # type: Any
+TRG_NO_DEFINER = ...  # type: Any
+OLD_FILE_FORMAT = ...  # type: Any
+SP_RECURSION_LIMIT = ...  # type: Any
+SP_PROC_TABLE_CORRUPT = ...  # type: Any
+SP_WRONG_NAME = ...  # type: Any
+TABLE_NEEDS_UPGRADE = ...  # type: Any
+SP_NO_AGGREGATE = ...  # type: Any
+MAX_PREPARED_STMT_COUNT_REACHED = ...  # type: Any
+VIEW_RECURSIVE = ...  # type: Any
+NON_GROUPING_FIELD_USED = ...  # type: Any
+TABLE_CANT_HANDLE_SPKEYS = ...  # type: Any
+NO_TRIGGERS_ON_SYSTEM_SCHEMA = ...  # type: Any
+USERNAME = ...  # type: Any
+HOSTNAME = ...  # type: Any
+WRONG_STRING_LENGTH = ...  # type: Any
+ERROR_LAST = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi b/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi
new file mode 100644
index 0000000..e9987b2
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/FIELD_TYPE.pyi
@@ -0,0 +1,35 @@
+# Stubs for pymysql.constants.FIELD_TYPE (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+DECIMAL = ...  # type: Any
+TINY = ...  # type: Any
+SHORT = ...  # type: Any
+LONG = ...  # type: Any
+FLOAT = ...  # type: Any
+DOUBLE = ...  # type: Any
+NULL = ...  # type: Any
+TIMESTAMP = ...  # type: Any
+LONGLONG = ...  # type: Any
+INT24 = ...  # type: Any
+DATE = ...  # type: Any
+TIME = ...  # type: Any
+DATETIME = ...  # type: Any
+YEAR = ...  # type: Any
+NEWDATE = ...  # type: Any
+VARCHAR = ...  # type: Any
+BIT = ...  # type: Any
+NEWDECIMAL = ...  # type: Any
+ENUM = ...  # type: Any
+SET = ...  # type: Any
+TINY_BLOB = ...  # type: Any
+MEDIUM_BLOB = ...  # type: Any
+LONG_BLOB = ...  # type: Any
+BLOB = ...  # type: Any
+VAR_STRING = ...  # type: Any
+STRING = ...  # type: Any
+GEOMETRY = ...  # type: Any
+CHAR = ...  # type: Any
+INTERVAL = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi b/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi
new file mode 100644
index 0000000..792df90
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/FLAG.pyi
@@ -0,0 +1,21 @@
+# Stubs for pymysql.constants.FLAG (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+NOT_NULL = ...  # type: Any
+PRI_KEY = ...  # type: Any
+UNIQUE_KEY = ...  # type: Any
+MULTIPLE_KEY = ...  # type: Any
+BLOB = ...  # type: Any
+UNSIGNED = ...  # type: Any
+ZEROFILL = ...  # type: Any
+BINARY = ...  # type: Any
+ENUM = ...  # type: Any
+AUTO_INCREMENT = ...  # type: Any
+TIMESTAMP = ...  # type: Any
+SET = ...  # type: Any
+PART_KEY = ...  # type: Any
+GROUP = ...  # type: Any
+UNIQUE = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi b/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi
new file mode 100644
index 0000000..6e4db8d
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/SERVER_STATUS.pyi
@@ -0,0 +1,16 @@
+# Stubs for pymysql.constants.SERVER_STATUS (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+SERVER_STATUS_IN_TRANS = ...  # type: Any
+SERVER_STATUS_AUTOCOMMIT = ...  # type: Any
+SERVER_MORE_RESULTS_EXISTS = ...  # type: Any
+SERVER_QUERY_NO_GOOD_INDEX_USED = ...  # type: Any
+SERVER_QUERY_NO_INDEX_USED = ...  # type: Any
+SERVER_STATUS_CURSOR_EXISTS = ...  # type: Any
+SERVER_STATUS_LAST_ROW_SENT = ...  # type: Any
+SERVER_STATUS_DB_DROPPED = ...  # type: Any
+SERVER_STATUS_NO_BACKSLASH_ESCAPES = ...  # type: Any
+SERVER_STATUS_METADATA_CHANGED = ...  # type: Any
diff --git a/typeshed/third_party/2and3/pymysql/constants/__init__.pyi b/typeshed/third_party/2and3/pymysql/constants/__init__.pyi
new file mode 100644
index 0000000..19336fe
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/constants/__init__.pyi
@@ -0,0 +1,3 @@
+# Stubs for pymysql.constants (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
diff --git a/typeshed/third_party/2and3/pymysql/converters.pyi b/typeshed/third_party/2and3/pymysql/converters.pyi
new file mode 100644
index 0000000..d9c8d29
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/converters.pyi
@@ -0,0 +1,50 @@
+# Stubs for pymysql.converters (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG
+from .charset import charset_by_id as charset_by_id
+
+PYTHON3 = ...  # type: Any
+ESCAPE_REGEX = ...  # type: Any
+ESCAPE_MAP = ...  # type: Any
+
+def escape_item(val, charset): ...
+def escape_dict(val, charset): ...
+def escape_sequence(val, charset): ...
+def escape_set(val, charset): ...
+def escape_bool(value): ...
+def escape_object(value): ...
+
+escape_int = ...  # type: Any
+
+escape_long = ...  # type: Any
+
+def escape_float(value): ...
+def escape_string(value): ...
+def escape_unicode(value): ...
+def escape_None(value): ...
+def escape_timedelta(obj): ...
+def escape_time(obj): ...
+def escape_datetime(obj): ...
+def escape_date(obj): ...
+def escape_struct_time(obj): ...
+def convert_datetime(connection, field, obj): ...
+def convert_timedelta(connection, field, obj): ...
+def convert_time(connection, field, obj): ...
+def convert_date(connection, field, obj): ...
+def convert_mysql_timestamp(connection, field, timestamp): ...
+def convert_set(s): ...
+def convert_bit(connection, field, b): ...
+def convert_characters(connection, field, data): ...
+def convert_int(connection, field, data): ...
+def convert_long(connection, field, data): ...
+def convert_float(connection, field, data): ...
+
+encoders = ...  # type: Any
+decoders = ...  # type: Any
+conversions = ...  # type: Any
+
+def convert_decimal(connection, field, data): ...
+def escape_decimal(obj): ...
diff --git a/typeshed/third_party/2and3/pymysql/cursors.pyi b/typeshed/third_party/2and3/pymysql/cursors.pyi
new file mode 100644
index 0000000..a06ac23
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/cursors.pyi
@@ -0,0 +1,37 @@
+# Stubs for pymysql.cursors (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Union, Tuple, Any, Dict, Optional, Text
+from .connections import Connection
+
+Gen = Union[Tuple[Any, ...], Dict[str, Any]]
+
+class Cursor:
+    connection = ...  # type: Connection
+    description = ...  # type: Tuple[Text, ...]
+    rownumber = ...  # type: int
+    rowcount = ...  # type: int
+    arraysize = ...  # type: int
+    messages = ...  # type: Any
+    errorhandler = ...  # type: Any
+    lastrowid = ...  # type: int
+    def __init__(self, connection: Connection) -> None: ...
+    def __del__(self) -> None: ...
+    def close(self) -> None: ...
+    def setinputsizes(self, *args): ...
+    def setoutputsizes(self, *args): ...
+    def nextset(self): ...
+    def execute(self, query: str, args=None) -> int: ...
+    def executemany(self, query: str, args) -> int: ...
+    def callproc(self, procname, args=...): ...
+    def fetchone(self) -> Optional[Gen]: ...
+    def fetchmany(self, size: int = None) -> Optional[Gen]: ...
+    def fetchall(self) -> Optional[Tuple[Gen, ...]]: ...
+    def scroll(self, value, mode=''): ...
+    def __iter__(self): ...
+
+class DictCursor(Cursor):
+    def fetchone(self) -> Optional[Dict[str, Any]]: ...
+    def fetchmany(self, size=None) -> Optional[Tuple[Dict[str, Any], ...]]: ...
+    def fetchall(self) -> Optional[Tuple[Dict[str, Any], ...]]: ...
diff --git a/typeshed/third_party/2and3/pymysql/err.pyi b/typeshed/third_party/2and3/pymysql/err.pyi
new file mode 100644
index 0000000..81f8803
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/err.pyi
@@ -0,0 +1,22 @@
+# Stubs for pymysql.err (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Dict
+from .constants import ER as ER
+
+class MySQLError(Exception): ...
+class Warning(MySQLError): ...
+class Error(MySQLError): ...
+class InterfaceError(Error): ...
+class DatabaseError(Error): ...
+class DataError(DatabaseError): ...
+class OperationalError(DatabaseError): ...
+class IntegrityError(DatabaseError): ...
+class InternalError(DatabaseError): ...
+class ProgrammingError(DatabaseError): ...
+class NotSupportedError(DatabaseError): ...
+
+error_map = ...  # type: Dict
+
+def raise_mysql_exception(data) -> None: ...
diff --git a/typeshed/third_party/2and3/pymysql/times.pyi b/typeshed/third_party/2and3/pymysql/times.pyi
new file mode 100644
index 0000000..e96c990
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/times.pyi
@@ -0,0 +1,14 @@
+# Stubs for pymysql.times (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any
+
+Date = ...  # type: Any
+Time = ...  # type: Any
+TimeDelta = ...  # type: Any
+Timestamp = ...  # type: Any
+
+def DateFromTicks(ticks): ...
+def TimeFromTicks(ticks): ...
+def TimestampFromTicks(ticks): ...
diff --git a/typeshed/third_party/2and3/pymysql/util.pyi b/typeshed/third_party/2and3/pymysql/util.pyi
new file mode 100644
index 0000000..2c2f7c0
--- /dev/null
+++ b/typeshed/third_party/2and3/pymysql/util.pyi
@@ -0,0 +1,7 @@
+# Stubs for pymysql.util (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+def byte2int(b): ...
+def int2byte(i): ...
+def join_bytes(bs): ...
diff --git a/typeshed/third_party/2/sqlalchemy/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/databases/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/databases/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/databases/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/databases/mysql.pyi b/typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/databases/mysql.pyi
rename to typeshed/third_party/2and3/sqlalchemy/databases/mysql.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/dialects/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/dialects/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/dialects/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/dialects/mysql/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/dialects/mysql/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/dialects/mysql/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/dialects/mysql/base.pyi b/typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/dialects/mysql/base.pyi
rename to typeshed/third_party/2and3/sqlalchemy/dialects/mysql/base.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/engine/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/engine/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/engine/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/engine/base.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/engine/base.pyi
rename to typeshed/third_party/2and3/sqlalchemy/engine/base.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/engine/strategies.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/engine/strategies.pyi
rename to typeshed/third_party/2and3/sqlalchemy/engine/strategies.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/engine/url.pyi b/typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/engine/url.pyi
rename to typeshed/third_party/2and3/sqlalchemy/engine/url.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/exc.pyi b/typeshed/third_party/2and3/sqlalchemy/exc.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/exc.pyi
rename to typeshed/third_party/2and3/sqlalchemy/exc.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/inspection.pyi b/typeshed/third_party/2and3/sqlalchemy/inspection.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/inspection.pyi
rename to typeshed/third_party/2and3/sqlalchemy/inspection.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/log.pyi b/typeshed/third_party/2and3/sqlalchemy/log.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/log.pyi
rename to typeshed/third_party/2and3/sqlalchemy/log.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/orm/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
similarity index 97%
rename from typeshed/third_party/2/sqlalchemy/orm/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
index 280dd02..2abfdfc 100644
--- a/typeshed/third_party/2/sqlalchemy/orm/__init__.pyi
+++ b/typeshed/third_party/2and3/sqlalchemy/orm/__init__.pyi
@@ -1,4 +1,4 @@
-# Stubs for sqlalchemy.orm (Python 2)
+# Stubs for sqlalchemy.orm (Python 2 and 3)
 #
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
@@ -6,7 +6,7 @@ from typing import Any
 # from . import mapper
 # from . import interfaces
 # from . import deprecated_interfaces
-# from . import util
+from . import util
 # from . import properties
 # from . import relationships
 # from . import descriptor_props
@@ -27,7 +27,7 @@ from ..util import langhelpers
 # MapperExtension = deprecated_interfaces.MapperExtension
 # SessionExtension = deprecated_interfaces.SessionExtension
 # AttributeExtension = deprecated_interfaces.AttributeExtension
-# aliased = util.aliased
+aliased = util.aliased
 # join = util.join
 # object_mapper = util.object_mapper
 # outerjoin = util.outerjoin
diff --git a/typeshed/third_party/2/sqlalchemy/orm/session.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
similarity index 98%
rename from typeshed/third_party/2/sqlalchemy/orm/session.pyi
rename to typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
index a53d619..8e7d396 100644
--- a/typeshed/third_party/2/sqlalchemy/orm/session.pyi
+++ b/typeshed/third_party/2and3/sqlalchemy/orm/session.pyi
@@ -1,4 +1,4 @@
-# Stubs for sqlalchemy.orm.session (Python 2)
+# Stubs for sqlalchemy.orm.session (Python 2 and 3)
 #
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
diff --git a/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi b/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
new file mode 100644
index 0000000..d3c000c
--- /dev/null
+++ b/typeshed/third_party/2and3/sqlalchemy/orm/util.pyi
@@ -0,0 +1,12 @@
+# Stubs for sqlalchemy.orm.session (Python 2 and 3)
+from typing import Optional, Any, Text
+
+from ..sql.selectable import FromClause
+
+class AliasedClass(object):
+    def __init__(self, cls: Any, alias: Optional[FromClause] =None, name: Optional[Text] =None, flat: bool =False, adapt_on_names: bool =False,
+                 with_polymorphic_mappers: Any =(), with_polymorphic_discriminator: Any =None, base_alias: Any =None, use_mapper_path: bool =False) -> None: ...
+    def __getattr__(self, key): ...
+    def __repr__(self): ...
+
+def aliased(element: Any, alias: Optional[FromClause] =None, name: Optional[Text] =None, flat: bool =False, adapt_on_names: bool =False) -> AliasedClass: ...
diff --git a/typeshed/third_party/2/sqlalchemy/pool.pyi b/typeshed/third_party/2and3/sqlalchemy/pool.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/pool.pyi
rename to typeshed/third_party/2and3/sqlalchemy/pool.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/schema.pyi b/typeshed/third_party/2and3/sqlalchemy/schema.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/schema.pyi
rename to typeshed/third_party/2and3/sqlalchemy/schema.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/annotation.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/annotation.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/annotation.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/base.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/base.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/base.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/ddl.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/ddl.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/ddl.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/dml.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/dml.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/dml.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/elements.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
similarity index 61%
rename from typeshed/third_party/2/sqlalchemy/sql/elements.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
index e2a8d38..4b585c6 100644
--- a/typeshed/third_party/2/sqlalchemy/sql/elements.pyi
+++ b/typeshed/third_party/2and3/sqlalchemy/sql/elements.pyi
@@ -1,3 +1,5 @@
+# Stubs for sqlalchemy.sql.elements (Python 2 and 3)
+from typing import Text, Any
 
 from .visitors import Visitable
 from .annotation import Annotated
@@ -7,7 +9,38 @@ from .. import util
 
 class ClauseElement(Visitable): ...
 
-class ColumnElement(ColumnOperators, ClauseElement): ...
+class ColumnElement(ColumnOperators, ClauseElement):
+    __visit_name__ = 'column'  # type: Text
+    primary_key = False  # type: Any
+    foreign_keys = []  # type: Any
+    _label = None  # type: Any
+    _key_label = key = None  # type: Any
+    _alt_names = ()  # type: Any
+    def self_group(self, against=None): ...
+    def _negate(self): ...
+    @util.memoized_property
+    def type(self): ...
+    @util.memoized_property
+    def comparator(self): ...
+    def __getattr__(self, key): ...
+    def operate(self, op, *other, **kwargs): ...
+    def reverse_operate(self, op, other, **kwargs): ...
+    def _bind_param(self, operator, obj): ...
+    @property
+    def expression(self): ...
+    @property
+    def _select_iterable(self): ...
+    @util.memoized_property
+    def base_columns(self): ...
+    @util.memoized_property
+    def proxy_set(self): ...
+    def shares_lineage(self, othercolumn): ...
+    def _compare_name_for_result(self, other): ...
+    def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw): ...
+    def compare(self, other, use_proxies=False, equivalents=None, **kw): ...
+    def label(self, name): ...
+    @util.memoized_property
+    def anon_label(self): ...
 
 class BindParameter(ColumnElement): ...
 class BinaryExpression(ColumnElement): ...
diff --git a/typeshed/third_party/2/sqlalchemy/sql/expression.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/expression.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/expression.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/functions.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/functions.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/functions.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/naming.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/naming.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/naming.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/operators.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/operators.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/operators.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/schema.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/schema.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/schema.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/selectable.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
similarity index 96%
rename from typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
index a3b1925..543349d 100644
--- a/typeshed/third_party/2/sqlalchemy/sql/selectable.pyi
+++ b/typeshed/third_party/2and3/sqlalchemy/sql/selectable.pyi
@@ -1,3 +1,5 @@
+from typing import Any
+
 from .base import Immutable, Executable, \
     ColumnCollection, ColumnSet, Generative
 from .elements import ClauseElement, TextClause, ClauseList, \
@@ -35,9 +37,9 @@ class FromClause(Selectable):
     @property
     def columns(self): ...
     @property
-    def primary_key(self): ...
+    def primary_key(self) -> Any: ...
     @property
-    def foreign_keys(self): ...
+    def foreign_keys(self) -> Any: ...
     def _init_collections(self): ...
     @property
     def _cols_populated(self): ...
diff --git a/typeshed/third_party/2/sqlalchemy/sql/sqltypes.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/sqltypes.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/sqltypes.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/type_api.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/type_api.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/type_api.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/sql/visitors.pyi b/typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/sql/visitors.pyi
rename to typeshed/third_party/2and3/sqlalchemy/sql/visitors.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/types.pyi b/typeshed/third_party/2and3/sqlalchemy/types.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/types.pyi
rename to typeshed/third_party/2and3/sqlalchemy/types.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/util/__init__.pyi b/typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/util/__init__.pyi
rename to typeshed/third_party/2and3/sqlalchemy/util/__init__.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/util/_collections.pyi b/typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/util/_collections.pyi
rename to typeshed/third_party/2and3/sqlalchemy/util/_collections.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/util/compat.pyi b/typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
similarity index 97%
rename from typeshed/third_party/2/sqlalchemy/util/compat.pyi
rename to typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
index 39ec058..191222c 100644
--- a/typeshed/third_party/2/sqlalchemy/util/compat.pyi
+++ b/typeshed/third_party/2and3/sqlalchemy/util/compat.pyi
@@ -1,6 +1,6 @@
 # Stubs for sqlalchemy.util.compat (Python 2)
 
-from typing import Any
+from typing import Any, Text
 from collections import namedtuple
 
 import threading
@@ -34,7 +34,7 @@ def inspect_getargspec(func): ...
 
 string_types = ...  # type: Any
 binary_type = ...  # type: Any
-text_type = unicode
+text_type = Text
 int_types = ...  # type: Any
 
 def callable(fn): ...
diff --git a/typeshed/third_party/2/sqlalchemy/util/deprecations.pyi b/typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/util/deprecations.pyi
rename to typeshed/third_party/2and3/sqlalchemy/util/deprecations.pyi
diff --git a/typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi b/typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
similarity index 100%
rename from typeshed/third_party/2/sqlalchemy/util/langhelpers.pyi
rename to typeshed/third_party/2and3/sqlalchemy/util/langhelpers.pyi
diff --git a/typeshed/third_party/3/dateutil/tz/__init__.pyi b/typeshed/third_party/3/dateutil/tz/__init__.pyi
new file mode 100644
index 0000000..30a7ab3
--- /dev/null
+++ b/typeshed/third_party/3/dateutil/tz/__init__.pyi
@@ -0,0 +1,5 @@
+# Stubs for dateutil.tz (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from .tz import tzutc, tzoffset, tzlocal, tzfile, tzrange, tzstr, tzical, gettz, datetime_exists, datetime_ambiguous
diff --git a/typeshed/third_party/3/dateutil/tz/_common.pyi b/typeshed/third_party/3/dateutil/tz/_common.pyi
new file mode 100644
index 0000000..e0856d2
--- /dev/null
+++ b/typeshed/third_party/3/dateutil/tz/_common.pyi
@@ -0,0 +1,28 @@
+# Stubs for dateutil.tz._common (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Optional
+from datetime import datetime, tzinfo, timedelta
+
+def tzname_in_python2(namefunc): ...
+def enfold(dt, fold: int = ...): ...
+
+class _DatetimeWithFold(datetime):
+    @property
+    def fold(self): ...
+
+class _tzinfo(tzinfo):
+    def is_ambiguous(self, dt: datetime) -> bool: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+
+class tzrangebase(_tzinfo):
+    def __init__(self) -> None: ...
+    def utcoffset(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
+    def dst(self, dt: Optional[datetime]) -> Optional[timedelta]: ...
+    def tzname(self, dt: Optional[datetime]) -> str: ...
+    def fromutc(self, dt: datetime) -> datetime: ...
+    def is_ambiguous(self, dt: datetime) -> bool: ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
diff --git a/typeshed/third_party/3/dateutil/tz/tz.pyi b/typeshed/third_party/3/dateutil/tz/tz.pyi
new file mode 100644
index 0000000..42bf05a
--- /dev/null
+++ b/typeshed/third_party/3/dateutil/tz/tz.pyi
@@ -0,0 +1,85 @@
+# Stubs for dateutil.tz.tz (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Optional, Union, IO, Tuple, List
+import datetime
+from ._common import tzname_in_python2 as tzname_in_python2, _tzinfo as _tzinfo
+from ._common import tzrangebase as tzrangebase, enfold as enfold
+from ..relativedelta import relativedelta
+
+ZERO = ...  # type: datetime.timedelta
+EPOCH = ...  # type: datetime.datetime
+EPOCHORDINAL = ...  # type: int
+
+class tzutc(datetime.tzinfo):
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class tzoffset(datetime.tzinfo):
+    def __init__(self, name, offset) -> None: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class tzlocal(_tzinfo):
+    def __init__(self) -> None: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime]) -> bool: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    __reduce__ = ...  # type: Any
+
+class _ttinfo:
+    def __init__(self) -> None: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+
+class tzfile(_tzinfo):
+    def __init__(self, fileobj: Union[str, IO[str]], filename: Optional[str] = ...) -> None: ...
+    def is_ambiguous(self, dt: Optional[datetime.datetime], idx: Optional[int] = ...) -> bool: ...
+    def utcoffset(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def dst(self, dt: Optional[datetime.datetime]) -> Optional[datetime.timedelta]: ...
+    def tzname(self, dt: Optional[datetime.datetime]) -> str: ...
+    def __eq__(self, other): ...
+    __hash__ = ...  # type: Any
+    def __ne__(self, other): ...
+    def __reduce__(self): ...
+    def __reduce_ex__(self, protocol): ...
+
+class tzrange(tzrangebase):
+    hasdst = ...  # type: bool
+    def __init__(self, stdabbr: str, stdoffset: Union[int, datetime.timedelta, None] = ..., dstabbr: Optional[str] = ..., dstoffset: Union[int, datetime.timedelta, None] = ..., start: Optional[relativedelta] = ..., end: Optional[relativedelta] = ...) -> None: ...
+    def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ...
+    def __eq__(self, other): ...
+
+class tzstr(tzrange):
+    hasdst = ...  # type: bool
+    def __init__(self, s, posix_offset: bool = ...) -> None: ...
+
+class tzical:
+    def __init__(self, fileobj: Union[str, IO[str]]) -> None: ...
+    def keys(self): ...
+    def get(self, tzid: Optional[Any] = ...): ...
+
+TZFILES = ...  # type: List[str]
+TZPATHS = ...  # type: List[str]
+
+def gettz(name: Optional[str] = ...) -> Optional[datetime.tzinfo]: ...
+def datetime_exists(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
+def datetime_ambiguous(dt: datetime.datetime, tz: Optional[datetime.tzinfo] = ...) -> bool: ...
diff --git a/typeshed/third_party/3/requests/api.pyi b/typeshed/third_party/3/requests/api.pyi
index f5f1ec3..120f695 100644
--- a/typeshed/third_party/3/requests/api.pyi
+++ b/typeshed/third_party/3/requests/api.pyi
@@ -1,14 +1,22 @@
 # Stubs for requests.api (Python 3)
 
-from typing import Optional, Union, Any
+from typing import Optional, Union, Any, Iterable, Mapping, Tuple
 
 from .models import Response
 
+ParamsMappingValueType = Union[str, bytes, int, float, Iterable[Union[str, bytes, int, float]]]
+
 def request(method: str, url: str, **kwargs) -> Response: ...
 def get(url: Union[str, bytes],
-        params: Optional[Union[str,
-                               bytes,
-                               dict[Union[str, bytes], Union[str, bytes]]]]=None,
+        params: Optional[
+            Union[
+                Mapping[Union[str, bytes, int, float], ParamsMappingValueType],
+                Union[str, bytes],
+                Tuple[Union[str, bytes, int, float], ParamsMappingValueType],
+                Mapping[str, ParamsMappingValueType],
+                Mapping[bytes, ParamsMappingValueType],
+                Mapping[int, ParamsMappingValueType],
+                Mapping[float, ParamsMappingValueType]]]=None,
         **kwargs) -> Response: ...
 def options(url: str, **kwargs) -> Response: ...
 def head(url: str, **kwargs) -> Response: ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/mypy.git



More information about the debian-med-commit mailing list