From a92bfa476467822e5723edb689cb1e80c13da52d Mon Sep 17 00:00:00 2001
From: "Felipe M. Vieira"
Date: Sat, 5 May 2018 12:52:29 -0300
Subject: [PATCH 001/185] set pymode's default python to python3
---
plugin/pymode.vim | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)
diff --git a/plugin/pymode.vim b/plugin/pymode.vim
index 969ade7b..01b734e5 100644
--- a/plugin/pymode.vim
+++ b/plugin/pymode.vim
@@ -19,7 +19,7 @@ filetype plugin on
" OPTIONS: {{{
" Vim Python interpreter. Set to 'disable' for remove python features.
-call pymode#default('g:pymode_python', '')
+call pymode#default('g:pymode_python', 'python3')
" Disable pymode warnings
call pymode#default('g:pymode_warning', 1)
@@ -274,10 +274,6 @@ if &compatible
endif
filetype plugin on
-" Disable python-related functionality
-" let g:pymode_python = 'disable'
-" let g:pymode_python = 'python3'
-
" UltiSnips Fixes
if !len(g:pymode_python)
if exists('g:_uspy') && g:_uspy == ':py'
From 5f78649aa58e86cf7ee62278c590f020c2b3c050 Mon Sep 17 00:00:00 2001
From: Diego Rabatone Oliveira
Date: Fri, 1 Jun 2018 18:10:17 -0300
Subject: [PATCH 002/185] Set python3 as default, if it exists.
If python3 is installed on the system, it becomes the default
interpreter. Otherwise, we fall back to the plain python command,
which may itself be Python 3 on systems that only ship python3 or
where python is linked to the python3 binary.
This commit fixes #896.
---
plugin/pymode.vim | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/plugin/pymode.vim b/plugin/pymode.vim
index 01b734e5..218d4d83 100644
--- a/plugin/pymode.vim
+++ b/plugin/pymode.vim
@@ -19,7 +19,11 @@ filetype plugin on
" OPTIONS: {{{
" Vim Python interpreter. Set to 'disable' for remove python features.
-call pymode#default('g:pymode_python', 'python3')
+if executable('python3')
+ call pymode#default('g:pymode_python', 'python3')
+else
+ call pymode#default('g:pymode_python', 'python')
+endif
" Disable pymode warnings
call pymode#default('g:pymode_warning', 1)
From 232801848221f5a8e437e960929b6acab9475a53 Mon Sep 17 00:00:00 2001
From: Diego Rabatone Oliveira
Date: Fri, 1 Jun 2018 18:53:58 -0300
Subject: [PATCH 003/185] Improve travis-ci configuration
---
.travis.yml | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 9be7bb46..8a689acb 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,12 +1,15 @@
# Build vim from source with python3 support and execute tests.
+dist: trusty
+sudo: required
branches:
only:
- develop
- dev_unstable
before_install:
- export ORIGINAL_FOLDER=$PWD
- - sudo apt install libncurses5-dev libgnome2-dev libgnomeui-dev libgtk2.0-dev libatk1.0-dev libbonoboui2-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git
- - sudo apt remove vim vim-runtime gvim
+ - sudo apt update
+ - sudo apt install -yqq libncurses5-dev libgnome2-dev libgnomeui-dev libgtk2.0-dev libatk1.0-dev libbonoboui2-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git
+ - sudo apt remove --purge vim vim-runtime gvim
- cd /tmp
- git clone https://github.com/vim/vim.git
- cd vim
From 521b4980433776f50e44fe37cb0dc0c21fd12050 Mon Sep 17 00:00:00 2001
From: Xiangyu Xu
Date: Tue, 12 Jun 2018 02:53:27 -0500
Subject: [PATCH 004/185] Update submodules Move astroid from pymode/libs to
submodules and update Move pylint from pymode/libs to submodules and update
Move rope from pymode/libs to submodules and update Update logilab to 1.4.1
from PyPI Update submodules recursively
---
.gitmodules | 9 +
pymode/libs/astroid | 1 +
pymode/libs/astroid/__pkginfo__.py | 61 -
pymode/libs/astroid/astpeephole.py | 74 -
pymode/libs/astroid/brain/brain_numpy.py | 50 -
pymode/libs/astroid/brain/brain_six.py | 276 -
pymode/libs/astroid/brain/brain_typing.py | 89 -
pymode/libs/astroid/node_classes.py | 2082 --------
pymode/libs/logilab | 1 +
pymode/libs/logilab-common-1.4.1/COPYING | 339 ++
.../libs/logilab-common-1.4.1/COPYING.LESSER | 510 ++
pymode/libs/logilab-common-1.4.1/ChangeLog | 1613 ++++++
pymode/libs/logilab-common-1.4.1/MANIFEST.in | 14 +
pymode/libs/logilab-common-1.4.1/PKG-INFO | 164 +
.../README} | 3 -
.../libs/logilab-common-1.4.1/__pkginfo__.py | 61 +
.../logilab-common-1.4.1/bin/logilab-pytest | 7 +
.../bin/logilab-pytest.bat | 17 +
.../logilab-common-1.4.1/doc/logilab-pytest.1 | 54 +
pymode/libs/logilab-common-1.4.1/doc/makefile | 8 +
.../logilab-common-1.4.1/logilab/__init__.py | 1 +
.../logilab/common/__init__.py | 2 +-
.../logilab/common/cache.py | 0
.../logilab/common/changelog.py | 65 +-
.../logilab/common/clcommands.py | 0
.../logilab/common/compat.py | 0
.../logilab/common/configuration.py | 19 +-
.../logilab/common/daemon.py | 0
.../logilab/common/date.py | 2 +-
.../logilab/common/debugger.py | 0
.../logilab/common/decorators.py | 0
.../logilab/common/deprecation.py | 0
.../logilab/common/fileutils.py | 15 +-
.../logilab/common/graph.py | 0
.../logilab/common/interface.py | 0
.../logilab/common/logging_ext.py | 0
.../logilab/common/modutils.py | 64 +-
.../logilab/common/optik_ext.py | 6 +-
.../logilab/common/optparser.py | 0
.../logilab/common/proc.py | 0
.../logilab/common/pytest.py | 254 +-
.../logilab/common/registry.py | 67 +-
.../logilab/common/shellutils.py | 58 +-
.../logilab/common/sphinx_ext.py | 0
.../logilab/common/sphinxutils.py | 0
.../logilab/common/table.py | 0
.../logilab/common/tasksqueue.py | 0
.../logilab/common/testlib.py | 708 +++
.../logilab/common/textutils.py | 2 +
.../logilab/common/tree.py | 0
.../logilab/common/umessage.py | 107 +-
.../logilab/common/ureports/__init__.py | 0
.../logilab/common/ureports/docbook_writer.py | 0
.../logilab/common/ureports/html_writer.py | 0
.../logilab/common/ureports/nodes.py | 0
.../logilab/common/ureports/text_writer.py | 0
.../logilab/common/urllib2ext.py | 0
.../logilab/common/vcgutils.py | 0
.../logilab/common/visitor.py | 0
.../logilab/common/xmlutils.py | 0
pymode/libs/logilab-common-1.4.1/setup.cfg | 9 +
pymode/libs/logilab-common-1.4.1/setup.py | 54 +
.../logilab-common-1.4.1/test/data/ChangeLog | 184 +
.../test/data/MyPyPa-0.1.0.zip | Bin 0 -> 206 bytes
.../test/data}/__init__.py | 0
.../test/data/__pkginfo__.py | 57 +
.../test/data/content_differ_dir/NOTHING} | 0
.../test/data/content_differ_dir/README | 1 +
.../test/data/content_differ_dir/subdir/coin | 1 +
.../data/content_differ_dir/subdir/toto.txt | 53 +
.../test/data/deprecation.py | 4 +
.../test/data/file_differ_dir/NOTHING} | 0
.../test/data/file_differ_dir/README | 1 +
.../test/data/file_differ_dir/subdir/toto.txt | 53 +
.../data/file_differ_dir/subdirtwo/Hello} | 0
.../test/data/find_test}/__init__.py | 0
.../test/data/find_test/foo.txt} | 0
.../test/data/find_test/module.py | 0
.../test/data/find_test/module2.py | 0
.../test/data/find_test/newlines.txt | 0
.../test/data/find_test/noendingnewline.py | 0
.../test/data/find_test/nonregr.py | 0
.../test/data/find_test/normal_file.txt | 0
.../test/data/find_test/spam.txt | 0
.../test/data/find_test/sub/doc.txt | 0
.../test/data/find_test/sub/momo.py | 0
.../test/data/find_test/test.ini | 0
.../test/data/find_test/test1.msg | 0
.../test/data/find_test/test2.msg | 0
.../data/find_test/write_protected_file.txt | 0
.../logilab-common-1.4.1/test/data/foo.txt | 9 +
.../test/data/lmfp/__init__.py | 2 +
.../test/data/lmfp/foo.py | 6 +
.../logilab-common-1.4.1/test/data/module.py | 69 +
.../logilab-common-1.4.1/test/data/module2.py | 77 +
.../test/data/newlines.txt | 3 +
.../test/data/noendingnewline.py | 36 +
.../logilab-common-1.4.1/test/data/nonregr.py | 16 +
.../test/data/normal_file.txt | 0
.../test/data/reference_dir/NOTHING | 0
.../test/data/reference_dir/README | 1 +
.../test/data/reference_dir/subdir/coin | 1 +
.../test/data/reference_dir/subdir/toto.txt | 53 +
.../test/data/regobjects.py | 22 +
.../test/data/regobjects2.py | 8 +
.../test/data/same_dir/NOTHING | 0
.../test/data/same_dir/README | 1 +
.../test/data/same_dir/subdir/coin | 1 +
.../test/data/same_dir/subdir/toto.txt | 53 +
.../logilab-common-1.4.1/test/data/spam.txt | 9 +
.../test/data/sub/doc.txt | 1 +
.../test/data/sub/momo.py | 3 +
.../test/data/subdir_differ_dir/NOTHING | 0
.../test/data/subdir_differ_dir/README | 1 +
.../test/data/subdir_differ_dir/subdir/coin | 1 +
.../data/subdir_differ_dir/subdir/toto.txt | 53 +
.../logilab-common-1.4.1/test/data/test.ini | 20 +
.../logilab-common-1.4.1/test/data/test1.msg | 30 +
.../logilab-common-1.4.1/test/data/test2.msg | 42 +
.../test/data/write_protected_file.txt | 0
.../test/unittest_cache.py | 129 +
.../test/unittest_changelog.py | 40 +
.../test/unittest_configuration.py | 509 ++
.../test/unittest_date.py | 206 +
.../test/unittest_decorators.py | 208 +
.../test/unittest_deprecation.py | 147 +
.../test/unittest_fileutils.py | 146 +
.../test/unittest_graph.py | 89 +
.../test/unittest_interface.py | 87 +
.../test/unittest_modutils.py | 296 ++
.../test/unittest_pytest.py | 86 +
.../test/unittest_registry.py | 220 +
.../test/unittest_shellutils.py | 235 +
.../test/unittest_table.py | 448 ++
.../test/unittest_taskqueue.py | 71 +
.../test/unittest_testlib.py | 790 +++
.../test/unittest_textutils.py | 268 +
.../test/unittest_tree.py | 247 +
.../test/unittest_umessage.py | 94 +
.../test/unittest_ureports_html.py | 63 +
.../test/unittest_ureports_text.py | 104 +
.../test/unittest_xmlutils.py | 75 +
.../libs/logilab-common-1.4.1/test/utils.py | 96 +
pymode/libs/logilab/__init__.py | 1 -
pymode/libs/logilab/common/testlib.py | 1338 -----
.../libs/logilab_common-1.0.2-py2.7-nspkg.pth | 1 -
.../logilab_common-1.0.2.dist-info/METADATA | 169 -
.../logilab_common-1.0.2.dist-info/RECORD | 87 -
.../libs/logilab_common-1.0.2.dist-info/WHEEL | 5 -
.../metadata.json | 1 -
.../namespace_packages.txt | 1 -
.../top_level.txt | 1 -
pymode/libs/pylint | 1 +
pymode/libs/pylint/__init__.py | 29 -
pymode/libs/pylint/__main__.py | 7 -
pymode/libs/pylint/__pkginfo__.py | 98 -
pymode/libs/pylint/checkers/__init__.py | 116 -
pymode/libs/pylint/checkers/async.py | 75 -
pymode/libs/pylint/checkers/base.py | 1660 ------
pymode/libs/pylint/checkers/classes.py | 1402 ------
.../libs/pylint/checkers/design_analysis.py | 334 --
pymode/libs/pylint/checkers/exceptions.py | 389 --
pymode/libs/pylint/checkers/format.py | 1069 ----
pymode/libs/pylint/checkers/imports.py | 768 ---
pymode/libs/pylint/checkers/logging.py | 271 -
pymode/libs/pylint/checkers/misc.py | 99 -
pymode/libs/pylint/checkers/newstyle.py | 179 -
pymode/libs/pylint/checkers/python3.py | 861 ----
pymode/libs/pylint/checkers/raw_metrics.py | 115 -
pymode/libs/pylint/checkers/refactoring.py | 715 ---
pymode/libs/pylint/checkers/similar.py | 363 --
pymode/libs/pylint/checkers/spelling.py | 265 -
pymode/libs/pylint/checkers/stdlib.py | 278 -
pymode/libs/pylint/checkers/strings.py | 621 ---
pymode/libs/pylint/checkers/typecheck.py | 1289 -----
pymode/libs/pylint/checkers/utils.py | 860 ----
pymode/libs/pylint/checkers/variables.py | 1324 -----
pymode/libs/pylint/config.py | 831 ---
pymode/libs/pylint/epylint.py | 175 -
pymode/libs/pylint/exceptions.py | 15 -
.../pylint/extensions/_check_docs_utils.py | 580 ---
pymode/libs/pylint/extensions/bad_builtin.py | 67 -
pymode/libs/pylint/extensions/check_docs.py | 21 -
pymode/libs/pylint/extensions/check_elif.py | 67 -
.../libs/pylint/extensions/comparetozero.py | 71 -
pymode/libs/pylint/extensions/docparams.py | 419 --
pymode/libs/pylint/extensions/docstyle.py | 75 -
pymode/libs/pylint/extensions/emptystring.py | 71 -
pymode/libs/pylint/extensions/mccabe.py | 170 -
.../extensions/overlapping_exceptions.py | 81 -
.../extensions/redefined_variable_type.py | 104 -
pymode/libs/pylint/graph.py | 170 -
pymode/libs/pylint/interfaces.py | 94 -
pymode/libs/pylint/lint.py | 1365 -----
pymode/libs/pylint/pyreverse/__init__.py | 9 -
pymode/libs/pylint/pyreverse/diadefslib.py | 228 -
pymode/libs/pylint/pyreverse/diagrams.py | 249 -
pymode/libs/pylint/pyreverse/inspector.py | 361 --
pymode/libs/pylint/pyreverse/main.py | 137 -
pymode/libs/pylint/pyreverse/utils.py | 201 -
pymode/libs/pylint/pyreverse/vcgutils.py | 185 -
pymode/libs/pylint/pyreverse/writer.py | 188 -
pymode/libs/pylint/reporters/__init__.py | 126 -
pymode/libs/pylint/reporters/json.py | 56 -
pymode/libs/pylint/reporters/text.py | 235 -
.../pylint/reporters/ureports/__init__.py | 94 -
.../libs/pylint/reporters/ureports/nodes.py | 182 -
.../pylint/reporters/ureports/text_writer.py | 94 -
pymode/libs/pylint/testutils.py | 382 --
pymode/libs/pylint/utils.py | 1182 -----
pymode/libs/rope | 1 +
pymode/libs/rope/__init__.py | 19 -
pymode/libs/rope/base/__init__.py | 8 -
pymode/libs/rope/base/arguments.py | 111 -
pymode/libs/rope/base/ast.py | 76 -
pymode/libs/rope/base/astutils.py | 64 -
pymode/libs/rope/base/builtins.py | 812 ---
pymode/libs/rope/base/change.py | 450 --
pymode/libs/rope/base/codeanalyze.py | 362 --
pymode/libs/rope/base/default_config.py | 115 -
pymode/libs/rope/base/evaluate.py | 332 --
pymode/libs/rope/base/exceptions.py | 61 -
pymode/libs/rope/base/fscommands.py | 288 --
pymode/libs/rope/base/history.py | 235 -
pymode/libs/rope/base/libutils.py | 122 -
pymode/libs/rope/base/oi/__init__.py | 38 -
pymode/libs/rope/base/oi/doa.py | 166 -
pymode/libs/rope/base/oi/memorydb.py | 127 -
pymode/libs/rope/base/oi/objectdb.py | 179 -
pymode/libs/rope/base/oi/objectinfo.py | 232 -
pymode/libs/rope/base/oi/runmod.py | 222 -
pymode/libs/rope/base/oi/soa.py | 139 -
pymode/libs/rope/base/oi/soi.py | 222 -
pymode/libs/rope/base/oi/transform.py | 285 --
.../rope/base/oi/type_hinting/evaluate.py | 353 --
.../libs/rope/base/oi/type_hinting/factory.py | 70 -
.../rope/base/oi/type_hinting/interfaces.py | 25 -
.../oi/type_hinting/providers/composite.py | 59 -
.../oi/type_hinting/providers/docstrings.py | 193 -
.../oi/type_hinting/providers/inheritance.py | 66 -
.../oi/type_hinting/providers/interfaces.py | 37 -
.../type_hinting/providers/numpydocstrings.py | 41 -
.../providers/pep0484_type_comments.py | 42 -
.../oi/type_hinting/resolvers/composite.py | 22 -
.../oi/type_hinting/resolvers/interfaces.py | 10 -
.../base/oi/type_hinting/resolvers/types.py | 16 -
.../libs/rope/base/oi/type_hinting/utils.py | 136 -
pymode/libs/rope/base/prefs.py | 41 -
pymode/libs/rope/base/project.py | 491 --
pymode/libs/rope/base/pycore.py | 346 --
pymode/libs/rope/base/pynames.py | 201 -
pymode/libs/rope/base/pynamesdef.py | 55 -
pymode/libs/rope/base/pyobjects.py | 311 --
pymode/libs/rope/base/pyobjectsdef.py | 562 ---
pymode/libs/rope/base/pyscopes.py | 314 --
pymode/libs/rope/base/resourceobserver.py | 272 -
pymode/libs/rope/base/resources.py | 243 -
pymode/libs/rope/base/simplify.py | 55 -
pymode/libs/rope/base/stdmods.py | 61 -
pymode/libs/rope/base/taskhandle.py | 131 -
pymode/libs/rope/base/utils/__init__.py | 98 -
pymode/libs/rope/base/utils/datastructures.py | 67 -
pymode/libs/rope/base/utils/pycompat.py | 45 -
pymode/libs/rope/base/worder.py | 525 --
pymode/libs/rope/contrib/__init__.py | 7 -
pymode/libs/rope/contrib/autoimport.py | 222 -
pymode/libs/rope/contrib/changestack.py | 52 -
pymode/libs/rope/contrib/codeassist.py | 695 ---
pymode/libs/rope/contrib/finderrors.py | 91 -
pymode/libs/rope/contrib/findit.py | 114 -
pymode/libs/rope/contrib/fixmodnames.py | 69 -
pymode/libs/rope/contrib/fixsyntax.py | 181 -
pymode/libs/rope/contrib/generate.py | 362 --
pymode/libs/rope/refactor/__init__.py | 55 -
pymode/libs/rope/refactor/change_signature.py | 352 --
.../libs/rope/refactor/encapsulate_field.py | 209 -
pymode/libs/rope/refactor/extract.py | 810 ---
pymode/libs/rope/refactor/functionutils.py | 222 -
.../rope/refactor/importutils/__init__.py | 316 --
.../libs/rope/refactor/importutils/actions.py | 361 --
.../rope/refactor/importutils/importinfo.py | 201 -
.../refactor/importutils/module_imports.py | 506 --
pymode/libs/rope/refactor/inline.py | 625 ---
.../libs/rope/refactor/introduce_factory.py | 135 -
.../libs/rope/refactor/introduce_parameter.py | 96 -
pymode/libs/rope/refactor/localtofield.py | 49 -
pymode/libs/rope/refactor/method_object.py | 90 -
pymode/libs/rope/refactor/move.py | 784 ---
pymode/libs/rope/refactor/multiproject.py | 78 -
pymode/libs/rope/refactor/occurrences.py | 402 --
pymode/libs/rope/refactor/patchedast.py | 829 ---
pymode/libs/rope/refactor/rename.py | 220 -
pymode/libs/rope/refactor/restructure.py | 307 --
pymode/libs/rope/refactor/similarfinder.py | 370 --
pymode/libs/rope/refactor/sourceutils.py | 91 -
pymode/libs/rope/refactor/suites.py | 158 -
pymode/libs/rope/refactor/topackage.py | 32 -
pymode/libs/rope/refactor/usefunction.py | 174 -
pymode/libs/rope/refactor/wildcards.py | 178 -
submodules/astroid/.coveragerc | 9 +
submodules/astroid/.github/ISSUE_TEMPLATE.md | 13 +
.../astroid/.github/PULL_REQUEST_TEMPLATE.md | 2 +
submodules/astroid/.gitignore | 15 +
submodules/astroid/.travis.yml | 43 +
submodules/astroid/COPYING | 339 ++
submodules/astroid/COPYING.LESSER | 510 ++
submodules/astroid/ChangeLog | 1778 +++++++
submodules/astroid/MANIFEST.in | 7 +
submodules/astroid/README.rst | 66 +
submodules/astroid/appveyor.yml | 32 +
submodules/astroid/appveyor/install.ps1 | 27 +
.../astroid}/astroid/__init__.py | 64 +-
submodules/astroid/astroid/__pkginfo__.py | 48 +
submodules/astroid/astroid/_ast.py | 43 +
.../astroid}/astroid/arguments.py | 7 +-
.../astroid}/astroid/as_string.py | 8 +-
.../astroid}/astroid/bases.py | 107 +-
.../astroid/astroid/brain/brain_attrs.py | 60 +
.../astroid/brain/brain_builtin_inference.py | 328 +-
.../astroid/brain/brain_collections.py | 39 +-
.../astroid/astroid/brain/brain_curses.py | 177 +
.../astroid}/astroid/brain/brain_dateutil.py | 0
.../astroid}/astroid/brain/brain_fstrings.py | 3 +-
.../astroid}/astroid/brain/brain_functools.py | 2 +-
.../astroid}/astroid/brain/brain_gi.py | 0
.../astroid}/astroid/brain/brain_hashlib.py | 10 +-
.../astroid}/astroid/brain/brain_io.py | 0
.../astroid}/astroid/brain/brain_mechanize.py | 0
.../astroid/brain/brain_multiprocessing.py | 0
.../astroid/brain/brain_namedtuple_enum.py | 228 +-
.../astroid}/astroid/brain/brain_nose.py | 0
.../astroid/astroid/brain/brain_numpy.py | 326 ++
.../astroid/brain/brain_pkg_resources.py | 1 +
.../astroid}/astroid/brain/brain_pytest.py | 0
.../astroid}/astroid/brain/brain_qt.py | 22 +-
.../astroid/astroid/brain/brain_random.py | 98 +
.../astroid}/astroid/brain/brain_re.py | 0
submodules/astroid/astroid/brain/brain_six.py | 191 +
.../astroid}/astroid/brain/brain_ssl.py | 0
.../astroid/brain/brain_subprocess.py | 13 +-
.../astroid}/astroid/brain/brain_threading.py | 0
.../astroid/astroid/brain/brain_typing.py | 93 +
.../astroid/astroid/brain/brain_uuid.py | 22 +
.../astroid}/astroid/builder.py | 77 +-
.../astroid}/astroid/context.py | 63 +-
.../astroid}/astroid/decorators.py | 18 +-
.../astroid}/astroid/exceptions.py | 18 +-
.../astroid}/astroid/helpers.py | 121 +-
.../astroid}/astroid/inference.py | 158 +-
.../astroid/astroid/interpreter/__init__.py | 0
.../astroid/interpreter/_import/__init__.py | 0
.../astroid/interpreter/_import/spec.py | 13 +-
.../astroid/interpreter/_import/util.py | 1 -
.../astroid/interpreter/dunder_lookup.py | 0
.../astroid/interpreter/objectmodel.py | 131 +-
.../astroid}/astroid/manager.py | 53 +-
.../astroid}/astroid/mixins.py | 64 +-
.../astroid}/astroid/modutils.py | 75 +-
submodules/astroid/astroid/node_classes.py | 4465 ++++++++++++++++
.../astroid}/astroid/nodes.py | 24 +-
.../astroid}/astroid/objects.py | 23 +-
.../astroid}/astroid/protocols.py | 97 +-
.../astroid}/astroid/raw_building.py | 20 +-
.../astroid}/astroid/rebuilder.py | 305 +-
.../astroid}/astroid/scoped_nodes.py | 1432 +++++-
.../astroid}/astroid/test_utils.py | 8 +-
submodules/astroid/astroid/tests/__init__.py | 0
submodules/astroid/astroid/tests/resources.py | 63 +
.../python2/data/MyPyPa-0.1.0-py2.5.zip | Bin 0 -> 1222 bytes
.../testdata/python2/data/SSL1/Connection1.py | 14 +
.../testdata/python2/data/SSL1/__init__.py | 1 +
.../tests/testdata/python2/data/__init__.py | 1 +
.../testdata/python2/data/absimp/__init__.py | 5 +
.../data/absimp/sidepackage/__init__.py | 3 +
.../testdata/python2/data/absimp/string.py | 3 +
.../tests/testdata/python2/data/absimport.py | 3 +
.../tests/testdata/python2/data/all.py | 9 +
.../testdata/python2/data/appl/__init__.py | 3 +
.../python2/data/appl/myConnection.py | 12 +
.../namespace_pep_420/submodule.py | 1 +
.../testdata/python2/data/descriptor_crash.py | 11 +
.../tests/testdata/python2/data/email.py | 1 +
.../python2/data/find_test/__init__.py | 0
.../testdata/python2/data/find_test/module.py | 0
.../python2/data/find_test/module2.py | 0
.../python2/data/find_test/noendingnewline.py | 0
.../python2/data/find_test/nonregr.py | 0
.../python2/data/foogle/fax/__init__.py | 0
.../testdata/python2/data/foogle/fax/a.py | 1 +
.../data/foogle_fax-0.12.5-py2.7-nspkg.pth | 2 +
.../tests/testdata/python2/data/format.py | 34 +
.../testdata/python2/data/invalid_encoding.py | 1 +
.../testdata/python2/data/lmfp/__init__.py | 2 +
.../tests/testdata/python2/data/lmfp/foo.py | 6 +
.../tests/testdata/python2/data/module.py | 89 +
.../python2/data/module1abs/__init__.py | 4 +
.../testdata/python2/data/module1abs/core.py | 1 +
.../tests/testdata/python2/data/module2.py | 143 +
.../python2/data/namespace_pep_420/module.py | 1 +
.../testdata/python2/data/noendingnewline.py | 36 +
.../tests/testdata/python2/data/nonregr.py | 57 +
.../tests/testdata/python2/data/notall.py | 7 +
.../testdata/python2/data/notamodule/file.py | 0
.../testdata/python2/data/package/__init__.py | 4 +
.../python2/data/package/absimport.py | 6 +
.../testdata/python2/data/package/hello.py | 2 +
.../import_package_subpackage_module.py | 49 +
.../data/package/subpackage/__init__.py | 1 +
.../python2/data/package/subpackage/module.py | 1 +
.../path_pkg_resources_1/package/__init__.py | 1 +
.../data/path_pkg_resources_1/package/foo.py | 0
.../path_pkg_resources_2/package/__init__.py | 1 +
.../data/path_pkg_resources_2/package/bar.py | 0
.../path_pkg_resources_3/package/__init__.py | 1 +
.../data/path_pkg_resources_3/package/baz.py | 0
.../data/path_pkgutil_1/package/__init__.py | 2 +
.../data/path_pkgutil_1/package/foo.py | 0
.../data/path_pkgutil_2/package/__init__.py | 2 +
.../data/path_pkgutil_2/package/bar.py | 0
.../data/path_pkgutil_3/package/__init__.py | 2 +
.../data/path_pkgutil_3/package/baz.py | 0
.../tests/testdata/python2/data/recursion.py | 3 +
.../testdata/python2/data/tmp__init__.py | 0
.../python2/data/unicode_package/__init__.py | 1 +
.../data/unicode_package/core/__init__.py | 0
.../python3/data/MyPyPa-0.1.0-py2.5.zip | Bin 0 -> 1222 bytes
.../testdata/python3/data/SSL1/Connection1.py | 14 +
.../testdata/python3/data/SSL1/__init__.py | 1 +
.../tests/testdata/python3/data/__init__.py | 1 +
.../testdata/python3/data/absimp/__init__.py | 5 +
.../data/absimp/sidepackage/__init__.py | 3 +
.../testdata/python3/data/absimp/string.py | 3 +
.../tests/testdata/python3/data/absimport.py | 3 +
.../tests/testdata/python3/data/all.py | 9 +
.../testdata/python3/data/appl/__init__.py | 3 +
.../python3/data/appl/myConnection.py | 11 +
.../namespace_pep_420/submodule.py | 1 +
.../testdata/python3/data/descriptor_crash.py | 11 +
.../tests/testdata/python3/data/email.py | 1 +
.../python3/data/find_test/__init__.py | 0
.../testdata/python3/data/find_test/module.py | 0
.../python3/data/find_test/module2.py | 0
.../python3/data/find_test/noendingnewline.py | 0
.../python3/data/find_test/nonregr.py | 0
.../python3/data/foogle/fax/__init__.py | 0
.../testdata/python3/data/foogle/fax/a.py | 1 +
.../data/foogle_fax-0.12.5-py2.7-nspkg.pth | 2 +
.../tests/testdata/python3/data/format.py | 34 +
.../testdata/python3/data/invalid_encoding.py | 1 +
.../testdata/python3/data/lmfp/__init__.py | 2 +
.../tests/testdata/python3/data/lmfp/foo.py | 6 +
.../tests/testdata/python3/data/module.py | 88 +
.../python3/data/module1abs/__init__.py | 4 +
.../testdata/python3/data/module1abs/core.py | 1 +
.../tests/testdata/python3/data/module2.py | 144 +
.../python3/data/namespace_pep_420/module.py | 1 +
.../testdata/python3/data/noendingnewline.py | 36 +
.../tests/testdata/python3/data/nonregr.py | 57 +
.../tests/testdata/python3/data/notall.py | 8 +
.../testdata/python3/data/notamodule/file.py | 0
.../testdata/python3/data/package/__init__.py | 4 +
.../python3/data/package/absimport.py | 6 +
.../testdata/python3/data/package/hello.py | 2 +
.../import_package_subpackage_module.py | 49 +
.../data/package/subpackage/__init__.py | 1 +
.../python3/data/package/subpackage/module.py | 1 +
.../path_pkg_resources_1/package/__init__.py | 1 +
.../data/path_pkg_resources_1/package/foo.py | 0
.../path_pkg_resources_2/package/__init__.py | 1 +
.../data/path_pkg_resources_2/package/bar.py | 0
.../path_pkg_resources_3/package/__init__.py | 1 +
.../data/path_pkg_resources_3/package/baz.py | 0
.../data/path_pkgutil_1/package/__init__.py | 2 +
.../data/path_pkgutil_1/package/foo.py | 0
.../data/path_pkgutil_2/package/__init__.py | 2 +
.../data/path_pkgutil_2/package/bar.py | 0
.../data/path_pkgutil_3/package/__init__.py | 2 +
.../data/path_pkgutil_3/package/baz.py | 0
.../tests/testdata/python3/data/recursion.py | 3 +
.../testdata/python3/data/tmp__init__.py | 0
.../python3/data/unicode_package/__init__.py | 1 +
.../data/unicode_package/core/__init__.py | 0
.../astroid/astroid/tests/unittest_brain.py | 1415 ++++++
.../astroid/tests/unittest_brain_numpy.py | 391 ++
.../astroid/astroid/tests/unittest_builder.py | 713 +++
.../astroid/astroid/tests/unittest_helpers.py | 254 +
.../astroid/tests/unittest_inference.py | 4481 +++++++++++++++++
.../astroid/astroid/tests/unittest_lookup.py | 341 ++
.../astroid/astroid/tests/unittest_manager.py | 286 ++
.../astroid/tests/unittest_modutils.py | 286 ++
.../astroid/astroid/tests/unittest_nodes.py | 927 ++++
.../astroid/tests/unittest_object_model.py | 577 +++
.../astroid/astroid/tests/unittest_objects.py | 520 ++
.../astroid/tests/unittest_protocols.py | 206 +
.../astroid/astroid/tests/unittest_python3.py | 354 ++
.../astroid/tests/unittest_raw_building.py | 91 +
.../astroid/tests/unittest_regrtest.py | 328 ++
.../astroid/tests/unittest_scoped_nodes.py | 1793 +++++++
.../astroid/tests/unittest_transforms.py | 235 +
.../astroid/astroid/tests/unittest_utils.py | 112 +
.../astroid}/astroid/transforms.py | 9 +-
.../astroid}/astroid/util.py | 32 +-
submodules/astroid/debian.sid/control | 42 +
submodules/astroid/debian.sid/rules | 41 +
submodules/astroid/debian.sid/source/format | 1 +
submodules/astroid/debian/changelog | 30 +
submodules/astroid/debian/compat | 1 +
submodules/astroid/debian/control | 31 +
submodules/astroid/debian/copyright | 36 +
submodules/astroid/debian/python-astroid.dirs | 1 +
submodules/astroid/debian/rules | 66 +
submodules/astroid/debian/watch | 2 +
submodules/astroid/doc/Makefile | 130 +
.../astroid/doc/api/astroid.exceptions.rst | 38 +
submodules/astroid/doc/api/astroid.nodes.rst | 226 +
submodules/astroid/doc/api/base_nodes.rst | 44 +
submodules/astroid/doc/api/general.rst | 5 +
submodules/astroid/doc/api/index.rst | 12 +
submodules/astroid/doc/ast_objects.inv | 11 +
submodules/astroid/doc/changelog.rst | 1 +
submodules/astroid/doc/conf.py | 237 +
submodules/astroid/doc/extending.rst | 250 +
submodules/astroid/doc/index.rst | 35 +
submodules/astroid/doc/inference.rst | 31 +
submodules/astroid/doc/make.bat | 170 +
submodules/astroid/doc/release.txt | 30 +
submodules/astroid/doc/whatsnew.rst | 11 +
submodules/astroid/pylintrc | 386 ++
submodules/astroid/pytest.ini | 3 +
submodules/astroid/setup.cfg | 8 +
submodules/astroid/setup.py | 49 +
submodules/astroid/tox.ini | 75 +
submodules/autopep8 | 2 +-
submodules/mccabe | 2 +-
submodules/pycodestyle | 2 +-
submodules/pydocstyle | 2 +-
submodules/pyflakes | 2 +-
submodules/pylint | 1 +
submodules/rope | 1 +
submodules/snowball_py | 2 +-
540 files changed, 36963 insertions(+), 45885 deletions(-)
create mode 120000 pymode/libs/astroid
delete mode 100644 pymode/libs/astroid/__pkginfo__.py
delete mode 100644 pymode/libs/astroid/astpeephole.py
delete mode 100644 pymode/libs/astroid/brain/brain_numpy.py
delete mode 100644 pymode/libs/astroid/brain/brain_six.py
delete mode 100644 pymode/libs/astroid/brain/brain_typing.py
delete mode 100644 pymode/libs/astroid/node_classes.py
create mode 120000 pymode/libs/logilab
create mode 100644 pymode/libs/logilab-common-1.4.1/COPYING
create mode 100644 pymode/libs/logilab-common-1.4.1/COPYING.LESSER
create mode 100644 pymode/libs/logilab-common-1.4.1/ChangeLog
create mode 100644 pymode/libs/logilab-common-1.4.1/MANIFEST.in
create mode 100644 pymode/libs/logilab-common-1.4.1/PKG-INFO
rename pymode/libs/{logilab_common-1.0.2.dist-info/DESCRIPTION.rst => logilab-common-1.4.1/README} (99%)
create mode 100644 pymode/libs/logilab-common-1.4.1/__pkginfo__.py
create mode 100755 pymode/libs/logilab-common-1.4.1/bin/logilab-pytest
create mode 100644 pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat
create mode 100644 pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1
create mode 100644 pymode/libs/logilab-common-1.4.1/doc/makefile
create mode 100644 pymode/libs/logilab-common-1.4.1/logilab/__init__.py
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/__init__.py (98%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/cache.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/changelog.py (82%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/clcommands.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/compat.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/configuration.py (98%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/daemon.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/date.py (99%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/debugger.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/decorators.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/deprecation.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/fileutils.py (97%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/graph.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/interface.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/logging_ext.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/modutils.py (93%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/optik_ext.py (98%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/optparser.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/proc.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/pytest.py (85%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/registry.py (95%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/shellutils.py (85%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/sphinx_ext.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/sphinxutils.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/table.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/tasksqueue.py (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/textutils.py (99%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/tree.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/umessage.py (64%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/ureports/__init__.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/ureports/docbook_writer.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/ureports/html_writer.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/ureports/nodes.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/ureports/text_writer.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/urllib2ext.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/vcgutils.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/visitor.py (100%)
rename pymode/libs/{ => logilab-common-1.4.1}/logilab/common/xmlutils.py (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/setup.cfg
create mode 100644 pymode/libs/logilab-common-1.4.1/setup.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/ChangeLog
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip
rename pymode/libs/{astroid/interpreter => logilab-common-1.4.1/test/data}/__init__.py (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/__pkginfo__.py
rename pymode/libs/{astroid/interpreter/_import/__init__.py => logilab-common-1.4.1/test/data/content_differ_dir/NOTHING} (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/deprecation.py
rename pymode/libs/{pylint/extensions/__init__.py => logilab-common-1.4.1/test/data/file_differ_dir/NOTHING} (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt
rename pymode/libs/{rope/base/oi/type_hinting/__init__.py => logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello} (100%)
rename pymode/libs/{rope/base/oi/type_hinting/providers => logilab-common-1.4.1/test/data/find_test}/__init__.py (100%)
rename pymode/libs/{rope/base/oi/type_hinting/resolvers/__init__.py => logilab-common-1.4.1/test/data/find_test/foo.txt} (100%)
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/foo.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/module.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/module2.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/newlines.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/nonregr.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/regobjects.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/README
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/spam.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test.ini
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test1.msg
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test2.msg
create mode 100644 pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_cache.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_date.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_graph.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_interface.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_registry.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_table.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_tree.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py
create mode 100644 pymode/libs/logilab-common-1.4.1/test/utils.py
delete mode 100644 pymode/libs/logilab/__init__.py
delete mode 100644 pymode/libs/logilab/common/testlib.py
delete mode 100644 pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/METADATA
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/RECORD
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/WHEEL
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/metadata.json
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt
delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt
create mode 120000 pymode/libs/pylint
delete mode 100644 pymode/libs/pylint/__init__.py
delete mode 100644 pymode/libs/pylint/__main__.py
delete mode 100644 pymode/libs/pylint/__pkginfo__.py
delete mode 100644 pymode/libs/pylint/checkers/__init__.py
delete mode 100644 pymode/libs/pylint/checkers/async.py
delete mode 100644 pymode/libs/pylint/checkers/base.py
delete mode 100644 pymode/libs/pylint/checkers/classes.py
delete mode 100644 pymode/libs/pylint/checkers/design_analysis.py
delete mode 100644 pymode/libs/pylint/checkers/exceptions.py
delete mode 100644 pymode/libs/pylint/checkers/format.py
delete mode 100644 pymode/libs/pylint/checkers/imports.py
delete mode 100644 pymode/libs/pylint/checkers/logging.py
delete mode 100644 pymode/libs/pylint/checkers/misc.py
delete mode 100644 pymode/libs/pylint/checkers/newstyle.py
delete mode 100644 pymode/libs/pylint/checkers/python3.py
delete mode 100644 pymode/libs/pylint/checkers/raw_metrics.py
delete mode 100644 pymode/libs/pylint/checkers/refactoring.py
delete mode 100644 pymode/libs/pylint/checkers/similar.py
delete mode 100644 pymode/libs/pylint/checkers/spelling.py
delete mode 100644 pymode/libs/pylint/checkers/stdlib.py
delete mode 100644 pymode/libs/pylint/checkers/strings.py
delete mode 100644 pymode/libs/pylint/checkers/typecheck.py
delete mode 100644 pymode/libs/pylint/checkers/utils.py
delete mode 100644 pymode/libs/pylint/checkers/variables.py
delete mode 100644 pymode/libs/pylint/config.py
delete mode 100644 pymode/libs/pylint/epylint.py
delete mode 100644 pymode/libs/pylint/exceptions.py
delete mode 100644 pymode/libs/pylint/extensions/_check_docs_utils.py
delete mode 100644 pymode/libs/pylint/extensions/bad_builtin.py
delete mode 100644 pymode/libs/pylint/extensions/check_docs.py
delete mode 100644 pymode/libs/pylint/extensions/check_elif.py
delete mode 100644 pymode/libs/pylint/extensions/comparetozero.py
delete mode 100644 pymode/libs/pylint/extensions/docparams.py
delete mode 100644 pymode/libs/pylint/extensions/docstyle.py
delete mode 100644 pymode/libs/pylint/extensions/emptystring.py
delete mode 100644 pymode/libs/pylint/extensions/mccabe.py
delete mode 100644 pymode/libs/pylint/extensions/overlapping_exceptions.py
delete mode 100644 pymode/libs/pylint/extensions/redefined_variable_type.py
delete mode 100644 pymode/libs/pylint/graph.py
delete mode 100644 pymode/libs/pylint/interfaces.py
delete mode 100644 pymode/libs/pylint/lint.py
delete mode 100644 pymode/libs/pylint/pyreverse/__init__.py
delete mode 100644 pymode/libs/pylint/pyreverse/diadefslib.py
delete mode 100644 pymode/libs/pylint/pyreverse/diagrams.py
delete mode 100644 pymode/libs/pylint/pyreverse/inspector.py
delete mode 100644 pymode/libs/pylint/pyreverse/main.py
delete mode 100644 pymode/libs/pylint/pyreverse/utils.py
delete mode 100644 pymode/libs/pylint/pyreverse/vcgutils.py
delete mode 100644 pymode/libs/pylint/pyreverse/writer.py
delete mode 100644 pymode/libs/pylint/reporters/__init__.py
delete mode 100644 pymode/libs/pylint/reporters/json.py
delete mode 100644 pymode/libs/pylint/reporters/text.py
delete mode 100644 pymode/libs/pylint/reporters/ureports/__init__.py
delete mode 100644 pymode/libs/pylint/reporters/ureports/nodes.py
delete mode 100644 pymode/libs/pylint/reporters/ureports/text_writer.py
delete mode 100644 pymode/libs/pylint/testutils.py
delete mode 100644 pymode/libs/pylint/utils.py
create mode 120000 pymode/libs/rope
delete mode 100644 pymode/libs/rope/__init__.py
delete mode 100644 pymode/libs/rope/base/__init__.py
delete mode 100644 pymode/libs/rope/base/arguments.py
delete mode 100644 pymode/libs/rope/base/ast.py
delete mode 100644 pymode/libs/rope/base/astutils.py
delete mode 100644 pymode/libs/rope/base/builtins.py
delete mode 100644 pymode/libs/rope/base/change.py
delete mode 100644 pymode/libs/rope/base/codeanalyze.py
delete mode 100644 pymode/libs/rope/base/default_config.py
delete mode 100644 pymode/libs/rope/base/evaluate.py
delete mode 100644 pymode/libs/rope/base/exceptions.py
delete mode 100644 pymode/libs/rope/base/fscommands.py
delete mode 100644 pymode/libs/rope/base/history.py
delete mode 100644 pymode/libs/rope/base/libutils.py
delete mode 100644 pymode/libs/rope/base/oi/__init__.py
delete mode 100644 pymode/libs/rope/base/oi/doa.py
delete mode 100644 pymode/libs/rope/base/oi/memorydb.py
delete mode 100644 pymode/libs/rope/base/oi/objectdb.py
delete mode 100644 pymode/libs/rope/base/oi/objectinfo.py
delete mode 100644 pymode/libs/rope/base/oi/runmod.py
delete mode 100644 pymode/libs/rope/base/oi/soa.py
delete mode 100644 pymode/libs/rope/base/oi/soi.py
delete mode 100644 pymode/libs/rope/base/oi/transform.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/evaluate.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/factory.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/interfaces.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/composite.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/docstrings.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/inheritance.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/interfaces.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/numpydocstrings.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/providers/pep0484_type_comments.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/resolvers/composite.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/resolvers/interfaces.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/resolvers/types.py
delete mode 100644 pymode/libs/rope/base/oi/type_hinting/utils.py
delete mode 100644 pymode/libs/rope/base/prefs.py
delete mode 100644 pymode/libs/rope/base/project.py
delete mode 100644 pymode/libs/rope/base/pycore.py
delete mode 100644 pymode/libs/rope/base/pynames.py
delete mode 100644 pymode/libs/rope/base/pynamesdef.py
delete mode 100644 pymode/libs/rope/base/pyobjects.py
delete mode 100644 pymode/libs/rope/base/pyobjectsdef.py
delete mode 100644 pymode/libs/rope/base/pyscopes.py
delete mode 100644 pymode/libs/rope/base/resourceobserver.py
delete mode 100644 pymode/libs/rope/base/resources.py
delete mode 100644 pymode/libs/rope/base/simplify.py
delete mode 100644 pymode/libs/rope/base/stdmods.py
delete mode 100644 pymode/libs/rope/base/taskhandle.py
delete mode 100644 pymode/libs/rope/base/utils/__init__.py
delete mode 100644 pymode/libs/rope/base/utils/datastructures.py
delete mode 100644 pymode/libs/rope/base/utils/pycompat.py
delete mode 100644 pymode/libs/rope/base/worder.py
delete mode 100644 pymode/libs/rope/contrib/__init__.py
delete mode 100644 pymode/libs/rope/contrib/autoimport.py
delete mode 100644 pymode/libs/rope/contrib/changestack.py
delete mode 100644 pymode/libs/rope/contrib/codeassist.py
delete mode 100644 pymode/libs/rope/contrib/finderrors.py
delete mode 100644 pymode/libs/rope/contrib/findit.py
delete mode 100644 pymode/libs/rope/contrib/fixmodnames.py
delete mode 100644 pymode/libs/rope/contrib/fixsyntax.py
delete mode 100644 pymode/libs/rope/contrib/generate.py
delete mode 100644 pymode/libs/rope/refactor/__init__.py
delete mode 100644 pymode/libs/rope/refactor/change_signature.py
delete mode 100644 pymode/libs/rope/refactor/encapsulate_field.py
delete mode 100644 pymode/libs/rope/refactor/extract.py
delete mode 100644 pymode/libs/rope/refactor/functionutils.py
delete mode 100644 pymode/libs/rope/refactor/importutils/__init__.py
delete mode 100644 pymode/libs/rope/refactor/importutils/actions.py
delete mode 100644 pymode/libs/rope/refactor/importutils/importinfo.py
delete mode 100644 pymode/libs/rope/refactor/importutils/module_imports.py
delete mode 100644 pymode/libs/rope/refactor/inline.py
delete mode 100644 pymode/libs/rope/refactor/introduce_factory.py
delete mode 100644 pymode/libs/rope/refactor/introduce_parameter.py
delete mode 100644 pymode/libs/rope/refactor/localtofield.py
delete mode 100644 pymode/libs/rope/refactor/method_object.py
delete mode 100644 pymode/libs/rope/refactor/move.py
delete mode 100644 pymode/libs/rope/refactor/multiproject.py
delete mode 100644 pymode/libs/rope/refactor/occurrences.py
delete mode 100644 pymode/libs/rope/refactor/patchedast.py
delete mode 100644 pymode/libs/rope/refactor/rename.py
delete mode 100644 pymode/libs/rope/refactor/restructure.py
delete mode 100644 pymode/libs/rope/refactor/similarfinder.py
delete mode 100644 pymode/libs/rope/refactor/sourceutils.py
delete mode 100644 pymode/libs/rope/refactor/suites.py
delete mode 100644 pymode/libs/rope/refactor/topackage.py
delete mode 100644 pymode/libs/rope/refactor/usefunction.py
delete mode 100644 pymode/libs/rope/refactor/wildcards.py
create mode 100644 submodules/astroid/.coveragerc
create mode 100644 submodules/astroid/.github/ISSUE_TEMPLATE.md
create mode 100644 submodules/astroid/.github/PULL_REQUEST_TEMPLATE.md
create mode 100644 submodules/astroid/.gitignore
create mode 100644 submodules/astroid/.travis.yml
create mode 100644 submodules/astroid/COPYING
create mode 100644 submodules/astroid/COPYING.LESSER
create mode 100644 submodules/astroid/ChangeLog
create mode 100644 submodules/astroid/MANIFEST.in
create mode 100644 submodules/astroid/README.rst
create mode 100644 submodules/astroid/appveyor.yml
create mode 100644 submodules/astroid/appveyor/install.ps1
rename {pymode/libs => submodules/astroid}/astroid/__init__.py (70%)
create mode 100644 submodules/astroid/astroid/__pkginfo__.py
create mode 100644 submodules/astroid/astroid/_ast.py
rename {pymode/libs => submodules/astroid}/astroid/arguments.py (98%)
rename {pymode/libs => submodules/astroid}/astroid/as_string.py (99%)
rename {pymode/libs => submodules/astroid}/astroid/bases.py (83%)
create mode 100644 submodules/astroid/astroid/brain/brain_attrs.py
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_builtin_inference.py (58%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_collections.py (51%)
create mode 100644 submodules/astroid/astroid/brain/brain_curses.py
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_dateutil.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_fstrings.py (97%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_functools.py (96%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_gi.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_hashlib.py (78%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_io.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_mechanize.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_multiprocessing.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_namedtuple_enum.py (55%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_nose.py (100%)
create mode 100644 submodules/astroid/astroid/brain/brain_numpy.py
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_pkg_resources.py (98%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_pytest.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_qt.py (68%)
create mode 100644 submodules/astroid/astroid/brain/brain_random.py
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_re.py (100%)
create mode 100644 submodules/astroid/astroid/brain/brain_six.py
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_ssl.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_subprocess.py (93%)
rename {pymode/libs => submodules/astroid}/astroid/brain/brain_threading.py (100%)
create mode 100644 submodules/astroid/astroid/brain/brain_typing.py
create mode 100644 submodules/astroid/astroid/brain/brain_uuid.py
rename {pymode/libs => submodules/astroid}/astroid/builder.py (88%)
rename {pymode/libs => submodules/astroid}/astroid/context.py (53%)
rename {pymode/libs => submodules/astroid}/astroid/decorators.py (93%)
rename {pymode/libs => submodules/astroid}/astroid/exceptions.py (90%)
rename {pymode/libs => submodules/astroid}/astroid/helpers.py (55%)
rename {pymode/libs => submodules/astroid}/astroid/inference.py (87%)
create mode 100644 submodules/astroid/astroid/interpreter/__init__.py
create mode 100644 submodules/astroid/astroid/interpreter/_import/__init__.py
rename {pymode/libs => submodules/astroid}/astroid/interpreter/_import/spec.py (94%)
rename {pymode/libs => submodules/astroid}/astroid/interpreter/_import/util.py (76%)
rename {pymode/libs => submodules/astroid}/astroid/interpreter/dunder_lookup.py (100%)
rename {pymode/libs => submodules/astroid}/astroid/interpreter/objectmodel.py (86%)
rename {pymode/libs => submodules/astroid}/astroid/manager.py (88%)
rename {pymode/libs => submodules/astroid}/astroid/mixins.py (73%)
rename {pymode/libs => submodules/astroid}/astroid/modutils.py (91%)
create mode 100644 submodules/astroid/astroid/node_classes.py
rename {pymode/libs => submodules/astroid}/astroid/nodes.py (70%)
rename {pymode/libs => submodules/astroid}/astroid/objects.py (94%)
rename {pymode/libs => submodules/astroid}/astroid/protocols.py (89%)
rename {pymode/libs => submodules/astroid}/astroid/raw_building.py (97%)
rename {pymode/libs => submodules/astroid}/astroid/rebuilder.py (84%)
rename {pymode/libs => submodules/astroid}/astroid/scoped_nodes.py (59%)
rename {pymode/libs => submodules/astroid}/astroid/test_utils.py (91%)
create mode 100644 submodules/astroid/astroid/tests/__init__.py
create mode 100644 submodules/astroid/astroid/tests/resources.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/SSL1/Connection1.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/SSL1/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/absimp/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/absimp/string.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/absimport.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/all.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/appl/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/appl/myConnection.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/descriptor_crash.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/email.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/find_test/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/find_test/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/find_test/module2.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/find_test/noendingnewline.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/find_test/nonregr.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/foogle/fax/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/foogle/fax/a.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/foogle_fax-0.12.5-py2.7-nspkg.pth
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/format.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/invalid_encoding.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/lmfp/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/lmfp/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/module1abs/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/module1abs/core.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/module2.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/namespace_pep_420/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/noendingnewline.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/nonregr.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/notall.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/notamodule/file.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/absimport.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/hello.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/subpackage/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/package/subpackage/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/bar.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/baz.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_1/package/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_2/package/bar.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/path_pkgutil_3/package/baz.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/recursion.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/tmp__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/unicode_package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/SSL1/Connection1.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/SSL1/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/absimp/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/absimp/string.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/absimport.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/all.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/appl/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/appl/myConnection.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/descriptor_crash.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/email.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/find_test/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/find_test/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/find_test/module2.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/find_test/noendingnewline.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/find_test/nonregr.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/foogle/fax/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/foogle/fax/a.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/foogle_fax-0.12.5-py2.7-nspkg.pth
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/format.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/invalid_encoding.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/lmfp/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/lmfp/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/module1abs/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/module1abs/core.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/module2.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/namespace_pep_420/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/noendingnewline.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/nonregr.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/notall.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/notamodule/file.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/absimport.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/hello.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/subpackage/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/package/subpackage/module.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/bar.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/baz.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_1/package/foo.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_2/package/bar.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/path_pkgutil_3/package/baz.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/recursion.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/tmp__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/unicode_package/__init__.py
create mode 100644 submodules/astroid/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py
create mode 100644 submodules/astroid/astroid/tests/unittest_brain.py
create mode 100644 submodules/astroid/astroid/tests/unittest_brain_numpy.py
create mode 100644 submodules/astroid/astroid/tests/unittest_builder.py
create mode 100644 submodules/astroid/astroid/tests/unittest_helpers.py
create mode 100644 submodules/astroid/astroid/tests/unittest_inference.py
create mode 100644 submodules/astroid/astroid/tests/unittest_lookup.py
create mode 100644 submodules/astroid/astroid/tests/unittest_manager.py
create mode 100644 submodules/astroid/astroid/tests/unittest_modutils.py
create mode 100644 submodules/astroid/astroid/tests/unittest_nodes.py
create mode 100644 submodules/astroid/astroid/tests/unittest_object_model.py
create mode 100644 submodules/astroid/astroid/tests/unittest_objects.py
create mode 100644 submodules/astroid/astroid/tests/unittest_protocols.py
create mode 100644 submodules/astroid/astroid/tests/unittest_python3.py
create mode 100644 submodules/astroid/astroid/tests/unittest_raw_building.py
create mode 100644 submodules/astroid/astroid/tests/unittest_regrtest.py
create mode 100644 submodules/astroid/astroid/tests/unittest_scoped_nodes.py
create mode 100644 submodules/astroid/astroid/tests/unittest_transforms.py
create mode 100644 submodules/astroid/astroid/tests/unittest_utils.py
rename {pymode/libs => submodules/astroid}/astroid/transforms.py (90%)
rename {pymode/libs => submodules/astroid}/astroid/util.py (82%)
create mode 100644 submodules/astroid/debian.sid/control
create mode 100755 submodules/astroid/debian.sid/rules
create mode 100644 submodules/astroid/debian.sid/source/format
create mode 100644 submodules/astroid/debian/changelog
create mode 100644 submodules/astroid/debian/compat
create mode 100644 submodules/astroid/debian/control
create mode 100644 submodules/astroid/debian/copyright
create mode 100644 submodules/astroid/debian/python-astroid.dirs
create mode 100755 submodules/astroid/debian/rules
create mode 100644 submodules/astroid/debian/watch
create mode 100644 submodules/astroid/doc/Makefile
create mode 100644 submodules/astroid/doc/api/astroid.exceptions.rst
create mode 100644 submodules/astroid/doc/api/astroid.nodes.rst
create mode 100644 submodules/astroid/doc/api/base_nodes.rst
create mode 100644 submodules/astroid/doc/api/general.rst
create mode 100644 submodules/astroid/doc/api/index.rst
create mode 100644 submodules/astroid/doc/ast_objects.inv
create mode 100644 submodules/astroid/doc/changelog.rst
create mode 100644 submodules/astroid/doc/conf.py
create mode 100644 submodules/astroid/doc/extending.rst
create mode 100644 submodules/astroid/doc/index.rst
create mode 100644 submodules/astroid/doc/inference.rst
create mode 100644 submodules/astroid/doc/make.bat
create mode 100644 submodules/astroid/doc/release.txt
create mode 100644 submodules/astroid/doc/whatsnew.rst
create mode 100644 submodules/astroid/pylintrc
create mode 100644 submodules/astroid/pytest.ini
create mode 100644 submodules/astroid/setup.cfg
create mode 100644 submodules/astroid/setup.py
create mode 100644 submodules/astroid/tox.ini
create mode 160000 submodules/pylint
create mode 160000 submodules/rope
diff --git a/.gitmodules b/.gitmodules
index 5a82bbf1..5963b5d1 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -26,3 +26,12 @@
path = submodules/pylama
url = https://github.com/fmv1992/pylama
ignore = dirty
+[submodule "submodules/pylint"]
+ path = submodules/pylint
+ url = https://github.com/PyCQA/pylint
+[submodule "submodules/rope"]
+ path = submodules/rope
+ url = https://github.com/python-rope/rope
+[submodule "pymode/submodules/astroid"]
+ path = pymode/submodules/astroid
+ url = https://github.com/PyCQA/astroid
diff --git a/pymode/libs/astroid b/pymode/libs/astroid
new file mode 120000
index 00000000..492d8fbc
--- /dev/null
+++ b/pymode/libs/astroid
@@ -0,0 +1 @@
+../../submodules/astroid/astroid
\ No newline at end of file
diff --git a/pymode/libs/astroid/__pkginfo__.py b/pymode/libs/astroid/__pkginfo__.py
deleted file mode 100644
index 96c2dcba..00000000
--- a/pymode/libs/astroid/__pkginfo__.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2014 Google, Inc.
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""astroid packaging information"""
-
-from sys import version_info as py_version
-
-from pkg_resources import parse_version
-from setuptools import __version__ as setuptools_version
-
-distname = 'astroid'
-
-modname = 'astroid'
-
-version = '1.5.3'
-numversion = tuple(map(int, version.split('.')))
-
-extras_require = {}
-install_requires = ['lazy_object_proxy', 'six', 'wrapt']
-
-
-def has_environment_marker_range_operators_support():
- """Code extracted from 'pytest/setup.py'
- https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31
-
- The first known release to support environment marker with range operators
- it is 17.1, see: https://setuptools.readthedocs.io/en/latest/history.html#id113
- """
- return parse_version(setuptools_version) >= parse_version('17.1')
-
-
-if has_environment_marker_range_operators_support():
- extras_require[':python_version<"3.4"'] = ['enum34>=1.1.3', 'singledispatch']
- extras_require[':python_version<"3.3"'] = ['backports.functools_lru_cache']
-else:
- if py_version < (3, 4):
- install_requires.extend(['enum34', 'singledispatch'])
- if py_version < (3, 3):
- install_requires.append('backports.functools_lru_cache')
-
-
-# pylint: disable=redefined-builtin; why license is a builtin anyway?
-license = 'LGPL'
-
-author = 'Python Code Quality Authority'
-author_email = 'code-quality@python.org'
-mailinglist = "mailto://%s" % author_email
-web = 'https://github.com/PyCQA/astroid'
-
-description = "A abstract syntax tree for Python with inference support."
-
-classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
- "Topic :: Software Development :: Quality Assurance",
- "Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 3",
- ]
diff --git a/pymode/libs/astroid/astpeephole.py b/pymode/libs/astroid/astpeephole.py
deleted file mode 100644
index dde5dca1..00000000
--- a/pymode/libs/astroid/astpeephole.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Small AST optimizations."""
-
-import _ast
-
-from astroid import nodes
-
-
-__all__ = ('ASTPeepholeOptimizer', )
-
-
-try:
- _TYPES = (_ast.Str, _ast.Bytes)
-except AttributeError:
- _TYPES = (_ast.Str, )
-
-
-class ASTPeepholeOptimizer(object):
- """Class for applying small optimizations to generate new AST."""
-
- def optimize_binop(self, node, parent=None):
- """Optimize BinOps with string Const nodes on the lhs.
-
- This fixes an infinite recursion crash, where multiple
- strings are joined using the addition operator. With a
- sufficient number of such strings, astroid will fail
- with a maximum recursion limit exceeded. The
- function will return a Const node with all the strings
- already joined.
- Return ``None`` if no AST node can be obtained
- through optimization.
- """
- ast_nodes = []
- current = node
- while isinstance(current, _ast.BinOp):
- # lhs must be a BinOp with the addition operand.
- if not isinstance(current.left, _ast.BinOp):
- return
- if (not isinstance(current.left.op, _ast.Add)
- or not isinstance(current.op, _ast.Add)):
- return
-
- # rhs must a str / bytes.
- if not isinstance(current.right, _TYPES):
- return
-
- ast_nodes.append(current.right.s)
- current = current.left
-
- if (isinstance(current, _ast.BinOp)
- and isinstance(current.left, _TYPES)
- and isinstance(current.right, _TYPES)):
- # Stop early if we are at the last BinOp in
- # the operation
- ast_nodes.append(current.right.s)
- ast_nodes.append(current.left.s)
- break
-
- if not ast_nodes:
- return
-
- # If we have inconsistent types, bail out.
- known = type(ast_nodes[0])
- if any(not isinstance(element, known)
- for element in ast_nodes[1:]):
- return
-
- value = known().join(reversed(ast_nodes))
- newnode = nodes.Const(value, node.lineno, node.col_offset, parent)
- return newnode
diff --git a/pymode/libs/astroid/brain/brain_numpy.py b/pymode/libs/astroid/brain/brain_numpy.py
deleted file mode 100644
index 8acfe053..00000000
--- a/pymode/libs/astroid/brain/brain_numpy.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy."""
-
-import astroid
-
-
-# TODO(cpopa): drop when understanding augmented assignments
-
-def numpy_core_transform():
- return astroid.parse('''
- from numpy.core import numeric
- from numpy.core import fromnumeric
- from numpy.core import defchararray
- from numpy.core import records
- from numpy.core import function_base
- from numpy.core import machar
- from numpy.core import getlimits
- from numpy.core import shape_base
- __all__ = (['char', 'rec', 'memmap', 'chararray'] + numeric.__all__ +
- fromnumeric.__all__ +
- records.__all__ +
- function_base.__all__ +
- machar.__all__ +
- getlimits.__all__ +
- shape_base.__all__)
- ''')
-
-
-def numpy_transform():
- return astroid.parse('''
- from numpy import core
- from numpy import matrixlib as _mat
- from numpy import lib
- __all__ = ['add_newdocs',
- 'ModuleDeprecationWarning',
- 'VisibleDeprecationWarning', 'linalg', 'fft', 'random',
- 'ctypeslib', 'ma',
- '__version__', 'pkgload', 'PackageLoader',
- 'show_config'] + core.__all__ + _mat.__all__ + lib.__all__
-
- ''')
-
-
-astroid.register_module_extender(astroid.MANAGER, 'numpy.core', numpy_core_transform)
-astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_transform)
diff --git a/pymode/libs/astroid/brain/brain_six.py b/pymode/libs/astroid/brain/brain_six.py
deleted file mode 100644
index f16a2938..00000000
--- a/pymode/libs/astroid/brain/brain_six.py
+++ /dev/null
@@ -1,276 +0,0 @@
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for six module."""
-
-import sys
-from textwrap import dedent
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-from astroid.exceptions import AstroidBuildingError, InferenceError
-from astroid import nodes
-
-
-SIX_ADD_METACLASS = 'six.add_metaclass'
-
-
-def _indent(text, prefix, predicate=None):
- """Adds 'prefix' to the beginning of selected lines in 'text'.
-
- If 'predicate' is provided, 'prefix' will only be added to the lines
- where 'predicate(line)' is True. If 'predicate' is not provided,
- it will default to adding 'prefix' to all non-empty lines that do not
- consist solely of whitespace characters.
- """
- if predicate is None:
- predicate = lambda line: line.strip()
-
- def prefixed_lines():
- for line in text.splitlines(True):
- yield prefix + line if predicate(line) else line
- return ''.join(prefixed_lines())
-
-
-if sys.version_info[0] == 2:
- _IMPORTS_2 = """
- import BaseHTTPServer
- import CGIHTTPServer
- import SimpleHTTPServer
-
- from StringIO import StringIO
- from cStringIO import StringIO as cStringIO
- from UserDict import UserDict
- from UserList import UserList
- from UserString import UserString
-
- import __builtin__ as builtins
- import thread as _thread
- import dummy_thread as _dummy_thread
- import ConfigParser as configparser
- import copy_reg as copyreg
- from itertools import (imap as map,
- ifilter as filter,
- ifilterfalse as filterfalse,
- izip_longest as zip_longest,
- izip as zip)
- import htmlentitydefs as html_entities
- import HTMLParser as html_parser
- import httplib as http_client
- import cookielib as http_cookiejar
- import Cookie as http_cookies
- import Queue as queue
- import repr as reprlib
- from pipes import quote as shlex_quote
- import SocketServer as socketserver
- import SimpleXMLRPCServer as xmlrpc_server
- import xmlrpclib as xmlrpc_client
- import _winreg as winreg
- import robotparser as urllib_robotparser
- import Tkinter as tkinter
- import tkFileDialog as tkinter_tkfiledialog
-
- input = raw_input
- intern = intern
- range = xrange
- xrange = xrange
- reduce = reduce
- reload_module = reload
-
- class UrllibParse(object):
- import urlparse as _urlparse
- import urllib as _urllib
- ParseResult = _urlparse.ParseResult
- SplitResult = _urlparse.SplitResult
- parse_qs = _urlparse.parse_qs
- parse_qsl = _urlparse.parse_qsl
- urldefrag = _urlparse.urldefrag
- urljoin = _urlparse.urljoin
- urlparse = _urlparse.urlparse
- urlsplit = _urlparse.urlsplit
- urlunparse = _urlparse.urlunparse
- urlunsplit = _urlparse.urlunsplit
- quote = _urllib.quote
- quote_plus = _urllib.quote_plus
- unquote = _urllib.unquote
- unquote_plus = _urllib.unquote_plus
- urlencode = _urllib.urlencode
- splitquery = _urllib.splitquery
- splittag = _urllib.splittag
- splituser = _urllib.splituser
- uses_fragment = _urlparse.uses_fragment
- uses_netloc = _urlparse.uses_netloc
- uses_params = _urlparse.uses_params
- uses_query = _urlparse.uses_query
- uses_relative = _urlparse.uses_relative
-
- class UrllibError(object):
- import urllib2 as _urllib2
- import urllib as _urllib
- URLError = _urllib2.URLError
- HTTPError = _urllib2.HTTPError
- ContentTooShortError = _urllib.ContentTooShortError
-
- class DummyModule(object):
- pass
-
- class UrllibRequest(object):
- import urlparse as _urlparse
- import urllib2 as _urllib2
- import urllib as _urllib
- urlopen = _urllib2.urlopen
- install_opener = _urllib2.install_opener
- build_opener = _urllib2.build_opener
- pathname2url = _urllib.pathname2url
- url2pathname = _urllib.url2pathname
- getproxies = _urllib.getproxies
- Request = _urllib2.Request
- OpenerDirector = _urllib2.OpenerDirector
- HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
- HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
- HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
- ProxyHandler = _urllib2.ProxyHandler
- BaseHandler = _urllib2.BaseHandler
- HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
- HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
- AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
- HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
- ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
- AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
- HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
- ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
- HTTPHandler = _urllib2.HTTPHandler
- HTTPSHandler = _urllib2.HTTPSHandler
- FileHandler = _urllib2.FileHandler
- FTPHandler = _urllib2.FTPHandler
- CacheFTPHandler = _urllib2.CacheFTPHandler
- UnknownHandler = _urllib2.UnknownHandler
- HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
- urlretrieve = _urllib.urlretrieve
- urlcleanup = _urllib.urlcleanup
- proxy_bypass = _urllib.proxy_bypass
-
- urllib_parse = UrllibParse()
- urllib_error = UrllibError()
- urllib = DummyModule()
- urllib.request = UrllibRequest()
- urllib.parse = UrllibParse()
- urllib.error = UrllibError()
- """
-else:
- _IMPORTS_3 = """
- import _io
- cStringIO = _io.StringIO
- filter = filter
- from itertools import filterfalse
- input = input
- from sys import intern
- map = map
- range = range
- from imp import reload as reload_module
- from functools import reduce
- from shlex import quote as shlex_quote
- from io import StringIO
- from collections import UserDict, UserList, UserString
- xrange = range
- zip = zip
- from itertools import zip_longest
- import builtins
- import configparser
- import copyreg
- import _dummy_thread
- import http.cookiejar as http_cookiejar
- import http.cookies as http_cookies
- import html.entities as html_entities
- import html.parser as html_parser
- import http.client as http_client
- import http.server
- BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
- import pickle as cPickle
- import queue
- import reprlib
- import socketserver
- import _thread
- import winreg
- import xmlrpc.server as xmlrpc_server
- import xmlrpc.client as xmlrpc_client
- import urllib.robotparser as urllib_robotparser
- import email.mime.multipart as email_mime_multipart
- import email.mime.nonmultipart as email_mime_nonmultipart
- import email.mime.text as email_mime_text
- import email.mime.base as email_mime_base
- import urllib.parse as urllib_parse
- import urllib.error as urllib_error
- import tkinter
- import tkinter.dialog as tkinter_dialog
- import tkinter.filedialog as tkinter_filedialog
- import tkinter.scrolledtext as tkinter_scrolledtext
- import tkinter.simpledialog as tkinder_simpledialog
- import tkinter.tix as tkinter_tix
- import tkinter.ttk as tkinter_ttk
- import tkinter.constants as tkinter_constants
- import tkinter.dnd as tkinter_dnd
- import tkinter.colorchooser as tkinter_colorchooser
- import tkinter.commondialog as tkinter_commondialog
- import tkinter.filedialog as tkinter_tkfiledialog
- import tkinter.font as tkinter_font
- import tkinter.messagebox as tkinter_messagebox
- import urllib.request
- import urllib.robotparser as urllib_robotparser
- import urllib.parse as urllib_parse
- import urllib.error as urllib_error
- """
-if sys.version_info[0] == 2:
- _IMPORTS = dedent(_IMPORTS_2)
-else:
- _IMPORTS = dedent(_IMPORTS_3)
-
-
-def six_moves_transform():
- code = dedent('''
- class Moves(object):
- {}
- moves = Moves()
- ''').format(_indent(_IMPORTS, " "))
- module = AstroidBuilder(MANAGER).string_build(code)
- module.name = 'six.moves'
- return module
-
-
-def _six_fail_hook(modname):
- if modname != 'six.moves':
- raise AstroidBuildingError(modname=modname)
- module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
- module.name = 'six.moves'
- return module
-
-def transform_six_add_metaclass(node):
- """Check if the given class node is decorated with *six.add_metaclass*
-
- If so, inject its argument as the metaclass of the underlying class.
- """
- if not node.decorators:
- return
-
- for decorator in node.decorators.nodes:
- if not isinstance(decorator, nodes.Call):
- continue
-
- try:
- func = next(decorator.func.infer())
- except InferenceError:
- continue
- if func.qname() == SIX_ADD_METACLASS and decorator.args:
- metaclass = decorator.args[0]
- node._metaclass = metaclass
- return node
-
-
-register_module_extender(MANAGER, 'six', six_moves_transform)
-register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
- six_moves_transform)
-MANAGER.register_failed_import_hook(_six_fail_hook)
-MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass)
diff --git a/pymode/libs/astroid/brain/brain_typing.py b/pymode/libs/astroid/brain/brain_typing.py
deleted file mode 100644
index 711066cb..00000000
--- a/pymode/libs/astroid/brain/brain_typing.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (c) 2016 David Euresti
-
-"""Astroid hooks for typing.py support."""
-import textwrap
-
-from astroid import (
- MANAGER, UseInferenceDefault, extract_node, inference_tip,
- nodes, InferenceError)
-from astroid.nodes import List, Tuple
-
-
-TYPING_NAMEDTUPLE_BASENAMES = {
- 'NamedTuple',
- 'typing.NamedTuple'
-}
-
-
-def infer_typing_namedtuple(node, context=None):
- """Infer a typing.NamedTuple(...) call."""
- # This is essentially a namedtuple with different arguments
- # so we extract the args and infer a named tuple.
- try:
- func = next(node.func.infer())
- except InferenceError:
- raise UseInferenceDefault
-
- if func.qname() != 'typing.NamedTuple':
- raise UseInferenceDefault
-
- if len(node.args) != 2:
- raise UseInferenceDefault
-
- if not isinstance(node.args[1], (List, Tuple)):
- raise UseInferenceDefault
-
- names = []
- for elt in node.args[1].elts:
- if not isinstance(elt, (List, Tuple)):
- raise UseInferenceDefault
- if len(elt.elts) != 2:
- raise UseInferenceDefault
- names.append(elt.elts[0].as_string())
-
- typename = node.args[0].as_string()
- node = extract_node('namedtuple(%(typename)s, (%(fields)s,)) ' %
- {'typename': typename, 'fields': ",".join(names)})
- return node.infer(context=context)
-
-
-def infer_typing_namedtuple_class(node, context=None):
- """Infer a subclass of typing.NamedTuple"""
-
- # Check if it has the corresponding bases
- if not set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES:
- raise UseInferenceDefault
-
- annassigns_fields = [
- annassign.target.name for annassign in node.body
- if isinstance(annassign, nodes.AnnAssign)
- ]
- code = textwrap.dedent('''
- from collections import namedtuple
- namedtuple({typename!r}, {fields!r})
- ''').format(
- typename=node.name,
- fields=",".join(annassigns_fields)
- )
- node = extract_node(code)
- return node.infer(context=context)
-
-
-def looks_like_typing_namedtuple(node):
- func = node.func
- if isinstance(func, nodes.Attribute):
- return func.attrname == 'NamedTuple'
- if isinstance(func, nodes.Name):
- return func.name == 'NamedTuple'
- return False
-
-
-MANAGER.register_transform(
- nodes.Call,
- inference_tip(infer_typing_namedtuple),
- looks_like_typing_namedtuple
-)
-MANAGER.register_transform(
- nodes.ClassDef,
- inference_tip(infer_typing_namedtuple_class)
-)
diff --git a/pymode/libs/astroid/node_classes.py b/pymode/libs/astroid/node_classes.py
deleted file mode 100644
index b4e71874..00000000
--- a/pymode/libs/astroid/node_classes.py
+++ /dev/null
@@ -1,2082 +0,0 @@
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014, 2016 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015-2016 Cara Vinson
-# Copyright (c) 2016 Jakub Wilk
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Module for some node classes. More nodes in scoped_nodes.py
-"""
-
-import abc
-import pprint
-import warnings
-try:
- from functools import singledispatch as _singledispatch
-except ImportError:
- from singledispatch import singledispatch as _singledispatch
-
-import six
-
-from astroid import as_string
-from astroid import bases
-from astroid import context as contextmod
-from astroid import decorators
-from astroid import exceptions
-from astroid import manager
-from astroid import mixins
-from astroid import util
-
-
-BUILTINS = six.moves.builtins.__name__
-MANAGER = manager.AstroidManager()
-
-
-@decorators.raise_if_nothing_inferred
-def unpack_infer(stmt, context=None):
- """recursively generate nodes inferred by the given statement.
- If the inferred value is a list or a tuple, recurse on the elements
- """
- if isinstance(stmt, (List, Tuple)):
- for elt in stmt.elts:
- if elt is util.Uninferable:
- yield elt
- continue
- for inferred_elt in unpack_infer(elt, context):
- yield inferred_elt
- # Explicit StopIteration to return error information, see comment
- # in raise_if_nothing_inferred.
- raise StopIteration(dict(node=stmt, context=context))
- # if inferred is a final node, return it and stop
- inferred = next(stmt.infer(context))
- if inferred is stmt:
- yield inferred
- # Explicit StopIteration to return error information, see comment
- # in raise_if_nothing_inferred.
- raise StopIteration(dict(node=stmt, context=context))
- # else, infer recursively, except Uninferable object that should be returned as is
- for inferred in stmt.infer(context):
- if inferred is util.Uninferable:
- yield inferred
- else:
- for inf_inf in unpack_infer(inferred, context):
- yield inf_inf
- raise StopIteration(dict(node=stmt, context=context))
-
-
-def are_exclusive(stmt1, stmt2, exceptions=None): # pylint: disable=redefined-outer-name
- """return true if the two given statements are mutually exclusive
-
- `exceptions` may be a list of exception names. If specified, discard If
- branches and check one of the statement is in an exception handler catching
- one of the given exceptions.
-
- algorithm :
- 1) index stmt1's parents
- 2) climb among stmt2's parents until we find a common parent
- 3) if the common parent is a If or TryExcept statement, look if nodes are
- in exclusive branches
- """
- # index stmt1's parents
- stmt1_parents = {}
- children = {}
- node = stmt1.parent
- previous = stmt1
- while node:
- stmt1_parents[node] = 1
- children[node] = previous
- previous = node
- node = node.parent
- # climb among stmt2's parents until we find a common parent
- node = stmt2.parent
- previous = stmt2
- while node:
- if node in stmt1_parents:
- # if the common parent is a If or TryExcept statement, look if
- # nodes are in exclusive branches
- if isinstance(node, If) and exceptions is None:
- if (node.locate_child(previous)[1]
- is not node.locate_child(children[node])[1]):
- return True
- elif isinstance(node, TryExcept):
- c2attr, c2node = node.locate_child(previous)
- c1attr, c1node = node.locate_child(children[node])
- if c1node is not c2node:
- first_in_body_caught_by_handlers = (
- c2attr == 'handlers'
- and c1attr == 'body'
- and previous.catch(exceptions))
- second_in_body_caught_by_handlers = (
- c2attr == 'body'
- and c1attr == 'handlers'
- and children[node].catch(exceptions))
- first_in_else_other_in_handlers = (
- c2attr == 'handlers' and c1attr == 'orelse')
- second_in_else_other_in_handlers = (
- c2attr == 'orelse' and c1attr == 'handlers')
- if any((first_in_body_caught_by_handlers,
- second_in_body_caught_by_handlers,
- first_in_else_other_in_handlers,
- second_in_else_other_in_handlers)):
- return True
- elif c2attr == 'handlers' and c1attr == 'handlers':
- return previous is not children[node]
- return False
- previous = node
- node = node.parent
- return False
-
-
-# getitem() helpers.
-
-_SLICE_SENTINEL = object()
-
-
-def _slice_value(index, context=None):
- """Get the value of the given slice index."""
-
- if isinstance(index, Const):
- if isinstance(index.value, (int, type(None))):
- return index.value
- elif index is None:
- return None
- else:
- # Try to infer what the index actually is.
- # Since we can't return all the possible values,
- # we'll stop at the first possible value.
- try:
- inferred = next(index.infer(context=context))
- except exceptions.InferenceError:
- pass
- else:
- if isinstance(inferred, Const):
- if isinstance(inferred.value, (int, type(None))):
- return inferred.value
-
- # Use a sentinel, because None can be a valid
- # value that this function can return,
- # as it is the case for unspecified bounds.
- return _SLICE_SENTINEL
-
-
-def _infer_slice(node, context=None):
- lower = _slice_value(node.lower, context)
- upper = _slice_value(node.upper, context)
- step = _slice_value(node.step, context)
- if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)):
- return slice(lower, upper, step)
-
- raise exceptions.AstroidTypeError(
- message='Could not infer slice used in subscript',
- node=node, index=node.parent, context=context)
-
-
-def _container_getitem(instance, elts, index, context=None):
- """Get a slice or an item, using the given *index*, for the given sequence."""
- try:
- if isinstance(index, Slice):
- index_slice = _infer_slice(index, context=context)
- new_cls = instance.__class__()
- new_cls.elts = elts[index_slice]
- new_cls.parent = instance.parent
- return new_cls
- elif isinstance(index, Const):
- return elts[index.value]
- except IndexError:
- util.reraise(exceptions.AstroidIndexError(
- message='Index {index!s} out of range',
- node=instance, index=index, context=context))
- except TypeError as exc:
- util.reraise(exceptions.AstroidTypeError(
- message='Type error {error!r}', error=exc,
- node=instance, index=index, context=context))
-
- raise exceptions.AstroidTypeError(
- 'Could not use %s as subscript index' % index
- )
-
-
-class NodeNG(object):
- """Base Class for all Astroid node classes.
-
- It represents a node of the new abstract syntax tree.
- """
- is_statement = False
- optional_assign = False # True for For (and for Comprehension if py <3.0)
- is_function = False # True for FunctionDef nodes
- # attributes below are set by the builder module or by raw factories
- lineno = None
- col_offset = None
- # parent node in the tree
- parent = None
- # attributes containing child node(s) redefined in most concrete classes:
- _astroid_fields = ()
- # attributes containing non-nodes:
- _other_fields = ()
- # attributes containing AST-dependent fields:
- _other_other_fields = ()
- # instance specific inference function infer(node, context)
- _explicit_inference = None
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- self.lineno = lineno
- self.col_offset = col_offset
- self.parent = parent
-
- def infer(self, context=None, **kwargs):
- """main interface to the interface system, return a generator on inferred
- values.
-
- If the instance has some explicit inference function set, it will be
- called instead of the default interface.
- """
- if self._explicit_inference is not None:
- # explicit_inference is not bound, give it self explicitly
- try:
- # pylint: disable=not-callable
- return self._explicit_inference(self, context, **kwargs)
- except exceptions.UseInferenceDefault:
- pass
-
- if not context:
- return self._infer(context, **kwargs)
-
- key = (self, context.lookupname,
- context.callcontext, context.boundnode)
- if key in context.inferred:
- return iter(context.inferred[key])
-
- return context.cache_generator(key, self._infer(context, **kwargs))
-
- def _repr_name(self):
- """return self.name or self.attrname or '' for nice representation"""
- return getattr(self, 'name', getattr(self, 'attrname', ''))
-
- def __str__(self):
- rname = self._repr_name()
- cname = type(self).__name__
- if rname:
- string = '%(cname)s.%(rname)s(%(fields)s)'
- alignment = len(cname) + len(rname) + 2
- else:
- string = '%(cname)s(%(fields)s)'
- alignment = len(cname) + 1
- result = []
- for field in self._other_fields + self._astroid_fields:
- value = getattr(self, field)
- width = 80 - len(field) - alignment
- lines = pprint.pformat(value, indent=2,
- width=width).splitlines(True)
-
- inner = [lines[0]]
- for line in lines[1:]:
- inner.append(' ' * alignment + line)
- result.append('%s=%s' % (field, ''.join(inner)))
-
- return string % {'cname': cname,
- 'rname': rname,
- 'fields': (',\n' + ' ' * alignment).join(result)}
-
- def __repr__(self):
- rname = self._repr_name()
- if rname:
- string = '<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>'
- else:
- string = '<%(cname)s l.%(lineno)s at 0x%(id)x>'
- return string % {'cname': type(self).__name__,
- 'rname': rname,
- 'lineno': self.fromlineno,
- 'id': id(self)}
-
- def accept(self, visitor):
- func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
- return func(self)
-
- def get_children(self):
- for field in self._astroid_fields:
- attr = getattr(self, field)
- if attr is None:
- continue
- if isinstance(attr, (list, tuple)):
- for elt in attr:
- yield elt
- else:
- yield attr
-
- def last_child(self):
- """an optimized version of list(get_children())[-1]"""
- for field in self._astroid_fields[::-1]:
- attr = getattr(self, field)
- if not attr: # None or empty listy / tuple
- continue
- if isinstance(attr, (list, tuple)):
- return attr[-1]
-
- return attr
- return None
-
- def parent_of(self, node):
- """return true if i'm a parent of the given node"""
- parent = node.parent
- while parent is not None:
- if self is parent:
- return True
- parent = parent.parent
- return False
-
- def statement(self):
- """return the first parent node marked as statement node"""
- if self.is_statement:
- return self
- return self.parent.statement()
-
- def frame(self):
- """return the first parent frame node (i.e. Module, FunctionDef or
- ClassDef)
-
- """
- return self.parent.frame()
-
- def scope(self):
- """return the first node defining a new scope (i.e. Module,
- FunctionDef, ClassDef, Lambda but also GenExpr)
-
- """
- return self.parent.scope()
-
- def root(self):
- """return the root node of the tree, (i.e. a Module)"""
- if self.parent:
- return self.parent.root()
- return self
-
- def child_sequence(self, child):
- """search for the right sequence where the child lies in"""
- for field in self._astroid_fields:
- node_or_sequence = getattr(self, field)
- if node_or_sequence is child:
- return [node_or_sequence]
- # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
- if (isinstance(node_or_sequence, (tuple, list))
- and child in node_or_sequence):
- return node_or_sequence
-
- msg = 'Could not find %s in %s\'s children'
- raise exceptions.AstroidError(msg % (repr(child), repr(self)))
-
- def locate_child(self, child):
- """return a 2-uple (child attribute name, sequence or node)"""
- for field in self._astroid_fields:
- node_or_sequence = getattr(self, field)
- # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
- if child is node_or_sequence:
- return field, child
- if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
- return field, node_or_sequence
- msg = 'Could not find %s in %s\'s children'
- raise exceptions.AstroidError(msg % (repr(child), repr(self)))
- # FIXME : should we merge child_sequence and locate_child ? locate_child
- # is only used in are_exclusive, child_sequence one time in pylint.
-
- def next_sibling(self):
- """return the next sibling statement"""
- return self.parent.next_sibling()
-
- def previous_sibling(self):
- """return the previous sibling statement"""
- return self.parent.previous_sibling()
-
- def nearest(self, nodes):
- """return the node which is the nearest before this one in the
- given list of nodes
- """
- myroot = self.root()
- mylineno = self.fromlineno
- nearest = None, 0
- for node in nodes:
- assert node.root() is myroot, \
- 'nodes %s and %s are not from the same module' % (self, node)
- lineno = node.fromlineno
- if node.fromlineno > mylineno:
- break
- if lineno > nearest[1]:
- nearest = node, lineno
- # FIXME: raise an exception if nearest is None ?
- return nearest[0]
-
- # these are lazy because they're relatively expensive to compute for every
- # single node, and they rarely get looked at
-
- @decorators.cachedproperty
- def fromlineno(self):
- if self.lineno is None:
- return self._fixed_source_line()
-
- return self.lineno
-
- @decorators.cachedproperty
- def tolineno(self):
- if not self._astroid_fields:
- # can't have children
- lastchild = None
- else:
- lastchild = self.last_child()
- if lastchild is None:
- return self.fromlineno
-
- return lastchild.tolineno
-
- def _fixed_source_line(self):
- """return the line number where the given node appears
-
- we need this method since not all nodes have the lineno attribute
- correctly set...
- """
- line = self.lineno
- _node = self
- try:
- while line is None:
- _node = next(_node.get_children())
- line = _node.lineno
- except StopIteration:
- _node = self.parent
- while _node and line is None:
- line = _node.lineno
- _node = _node.parent
- return line
-
- def block_range(self, lineno):
- """handle block line numbers range for non block opening statements
- """
- return lineno, self.tolineno
-
- def set_local(self, name, stmt):
- """delegate to a scoped parent handling a locals dictionary"""
- self.parent.set_local(name, stmt)
-
- def nodes_of_class(self, klass, skip_klass=None):
- """return an iterator on nodes which are instance of the given class(es)
-
- klass may be a class object or a tuple of class objects
- """
- if isinstance(self, klass):
- yield self
- for child_node in self.get_children():
- if skip_klass is not None and isinstance(child_node, skip_klass):
- continue
- for matching in child_node.nodes_of_class(klass, skip_klass):
- yield matching
-
- def _infer_name(self, frame, name):
- # overridden for ImportFrom, Import, Global, TryExcept and Arguments
- return None
-
- def _infer(self, context=None):
- """we don't know how to resolve a statement by default"""
- # this method is overridden by most concrete classes
- raise exceptions.InferenceError('No inference function for {node!r}.',
- node=self, context=context)
-
- def inferred(self):
- '''return list of inferred values for a more simple inference usage'''
- return list(self.infer())
-
- def infered(self):
- warnings.warn('%s.infered() is deprecated and slated for removal '
- 'in astroid 2.0, use %s.inferred() instead.'
- % (type(self).__name__, type(self).__name__),
- PendingDeprecationWarning, stacklevel=2)
- return self.inferred()
-
- def instantiate_class(self):
- """instantiate a node if it is a ClassDef node, else return self"""
- return self
-
- def has_base(self, node):
- return False
-
- def callable(self):
- return False
-
- def eq(self, value):
- return False
-
- def as_string(self):
- return as_string.to_code(self)
-
- def repr_tree(self, ids=False, include_linenos=False,
- ast_state=False, indent=' ', max_depth=0, max_width=80):
- """Returns a string representation of the AST from this node.
-
- :param ids: If true, includes the ids with the node type names.
-
- :param include_linenos: If true, includes the line numbers and
- column offsets.
-
- :param ast_state: If true, includes information derived from
- the whole AST like local and global variables.
-
- :param indent: A string to use to indent the output string.
-
- :param max_depth: If set to a positive integer, won't return
- nodes deeper than max_depth in the string.
-
- :param max_width: Only positive integer values are valid, the
- default is 80. Attempts to format the output string to stay
- within max_width characters, but can exceed it under some
- circumstances.
- """
- @_singledispatch
- def _repr_tree(node, result, done, cur_indent='', depth=1):
- """Outputs a representation of a non-tuple/list, non-node that's
- contained within an AST, including strings.
- """
- lines = pprint.pformat(node,
- width=max(max_width - len(cur_indent),
- 1)).splitlines(True)
- result.append(lines[0])
- result.extend([cur_indent + line for line in lines[1:]])
- return len(lines) != 1
-
- # pylint: disable=unused-variable; doesn't understand singledispatch
- @_repr_tree.register(tuple)
- @_repr_tree.register(list)
- def _repr_seq(node, result, done, cur_indent='', depth=1):
- """Outputs a representation of a sequence that's contained within an AST."""
- cur_indent += indent
- result.append('[')
- if not node:
- broken = False
- elif len(node) == 1:
- broken = _repr_tree(node[0], result, done, cur_indent, depth)
- elif len(node) == 2:
- broken = _repr_tree(node[0], result, done, cur_indent, depth)
- if not broken:
- result.append(', ')
- else:
- result.append(',\n')
- result.append(cur_indent)
- broken = (_repr_tree(node[1], result, done, cur_indent, depth)
- or broken)
- else:
- result.append('\n')
- result.append(cur_indent)
- for child in node[:-1]:
- _repr_tree(child, result, done, cur_indent, depth)
- result.append(',\n')
- result.append(cur_indent)
- _repr_tree(node[-1], result, done, cur_indent, depth)
- broken = True
- result.append(']')
- return broken
-
- # pylint: disable=unused-variable; doesn't understand singledispatch
- @_repr_tree.register(NodeNG)
- def _repr_node(node, result, done, cur_indent='', depth=1):
- """Outputs a strings representation of an astroid node."""
- if node in done:
- result.append(indent + ' max_depth:
- result.append('...')
- return False
- depth += 1
- cur_indent += indent
- if ids:
- result.append('%s<0x%x>(\n' % (type(node).__name__, id(node)))
- else:
- result.append('%s(' % type(node).__name__)
- fields = []
- if include_linenos:
- fields.extend(('lineno', 'col_offset'))
- fields.extend(node._other_fields)
- fields.extend(node._astroid_fields)
- if ast_state:
- fields.extend(node._other_other_fields)
- if not fields:
- broken = False
- elif len(fields) == 1:
- result.append('%s=' % fields[0])
- broken = _repr_tree(getattr(node, fields[0]), result, done,
- cur_indent, depth)
- else:
- result.append('\n')
- result.append(cur_indent)
- for field in fields[:-1]:
- result.append('%s=' % field)
- _repr_tree(getattr(node, field), result, done, cur_indent,
- depth)
- result.append(',\n')
- result.append(cur_indent)
- result.append('%s=' % fields[-1])
- _repr_tree(getattr(node, fields[-1]), result, done, cur_indent,
- depth)
- broken = True
- result.append(')')
- return broken
-
- result = []
- _repr_tree(self, result, set())
- return ''.join(result)
-
- def bool_value(self):
- """Determine the bool value of this node
-
- The boolean value of a node can have three
- possible values:
-
- * False. For instance, empty data structures,
- False, empty strings, instances which return
- explicitly False from the __nonzero__ / __bool__
- method.
- * True. Most of constructs are True by default:
- classes, functions, modules etc
- * Uninferable: the inference engine is uncertain of the
- node's value.
- """
- return util.Uninferable
-
-
-class Statement(NodeNG):
- """Statement node adding a few attributes"""
- is_statement = True
-
- def next_sibling(self):
- """return the next sibling statement"""
- stmts = self.parent.child_sequence(self)
- index = stmts.index(self)
- try:
- return stmts[index +1]
- except IndexError:
- pass
-
- def previous_sibling(self):
- """return the previous sibling statement"""
- stmts = self.parent.child_sequence(self)
- index = stmts.index(self)
- if index >= 1:
- return stmts[index -1]
-
-
-
-@six.add_metaclass(abc.ABCMeta)
-class _BaseContainer(mixins.ParentAssignTypeMixin,
- NodeNG, bases.Instance):
- """Base class for Set, FrozenSet, Tuple and List."""
-
- _astroid_fields = ('elts',)
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- self.elts = []
- super(_BaseContainer, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, elts):
- self.elts = elts
-
- @classmethod
- def from_constants(cls, elts=None):
- node = cls()
- if elts is None:
- node.elts = []
- else:
- node.elts = [const_factory(e) for e in elts]
- return node
-
- def itered(self):
- return self.elts
-
- def bool_value(self):
- return bool(self.elts)
-
- @abc.abstractmethod
- def pytype(self):
- pass
-
-
-class LookupMixIn(object):
- """Mixin looking up a name in the right scope
- """
-
- def lookup(self, name):
- """lookup a variable name
-
- return the scope node and the list of assignments associated to the
- given name according to the scope where it has been found (locals,
- globals or builtin)
-
- The lookup is starting from self's scope. If self is not a frame itself
- and the name is found in the inner frame locals, statements will be
- filtered to remove ignorable statements according to self's location
- """
- return self.scope().scope_lookup(self, name)
-
- def ilookup(self, name):
- """inferred lookup
-
- return an iterator on inferred values of the statements returned by
- the lookup method
- """
- frame, stmts = self.lookup(name)
- context = contextmod.InferenceContext()
- return bases._infer_stmts(stmts, context, frame)
-
- def _filter_stmts(self, stmts, frame, offset):
- """filter statements to remove ignorable statements.
-
- If self is not a frame itself and the name is found in the inner
- frame locals, statements will be filtered to remove ignorable
- statements according to self's location
- """
- # if offset == -1, my actual frame is not the inner frame but its parent
- #
- # class A(B): pass
- #
- # we need this to resolve B correctly
- if offset == -1:
- myframe = self.frame().parent.frame()
- else:
- myframe = self.frame()
- # If the frame of this node is the same as the statement
- # of this node, then the node is part of a class or
- # a function definition and the frame of this node should be the
- # the upper frame, not the frame of the definition.
- # For more information why this is important,
- # see Pylint issue #295.
- # For example, for 'b', the statement is the same
- # as the frame / scope:
- #
- # def test(b=1):
- # ...
-
- if self.statement() is myframe and myframe.parent:
- myframe = myframe.parent.frame()
- mystmt = self.statement()
- # line filtering if we are in the same frame
- #
- # take care node may be missing lineno information (this is the case for
- # nodes inserted for living objects)
- if myframe is frame and mystmt.fromlineno is not None:
- assert mystmt.fromlineno is not None, mystmt
- mylineno = mystmt.fromlineno + offset
- else:
- # disabling lineno filtering
- mylineno = 0
- _stmts = []
- _stmt_parents = []
- for node in stmts:
- stmt = node.statement()
- # line filtering is on and we have reached our location, break
- if mylineno > 0 and stmt.fromlineno > mylineno:
- break
- assert hasattr(node, 'assign_type'), (node, node.scope(),
- node.scope().locals)
- assign_type = node.assign_type()
- if node.has_base(self):
- break
-
- _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt)
- if done:
- break
-
- optional_assign = assign_type.optional_assign
- if optional_assign and assign_type.parent_of(self):
- # we are inside a loop, loop var assignment is hiding previous
- # assignment
- _stmts = [node]
- _stmt_parents = [stmt.parent]
- continue
-
- # XXX comment various branches below!!!
- try:
- pindex = _stmt_parents.index(stmt.parent)
- except ValueError:
- pass
- else:
- # we got a parent index, this means the currently visited node
- # is at the same block level as a previously visited node
- if _stmts[pindex].assign_type().parent_of(assign_type):
- # both statements are not at the same block level
- continue
- # if currently visited node is following previously considered
- # assignment and both are not exclusive, we can drop the
- # previous one. For instance in the following code ::
- #
- # if a:
- # x = 1
- # else:
- # x = 2
- # print x
- #
- # we can't remove neither x = 1 nor x = 2 when looking for 'x'
- # of 'print x'; while in the following ::
- #
- # x = 1
- # x = 2
- # print x
- #
- # we can remove x = 1 when we see x = 2
- #
- # moreover, on loop assignment types, assignment won't
- # necessarily be done if the loop has no iteration, so we don't
- # want to clear previous assignments if any (hence the test on
- # optional_assign)
- if not (optional_assign or are_exclusive(_stmts[pindex], node)):
- del _stmt_parents[pindex]
- del _stmts[pindex]
- if isinstance(node, AssignName):
- if not optional_assign and stmt.parent is mystmt.parent:
- _stmts = []
- _stmt_parents = []
- elif isinstance(node, DelName):
- _stmts = []
- _stmt_parents = []
- continue
- if not are_exclusive(self, node):
- _stmts.append(node)
- _stmt_parents.append(stmt.parent)
- return _stmts
-
-
-# Name classes
-
-class AssignName(LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG):
- """class representing an AssignName node"""
- _other_fields = ('name',)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- self.name = name
- super(AssignName, self).__init__(lineno, col_offset, parent)
-
-
-class DelName(LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG):
- """class representing a DelName node"""
- _other_fields = ('name',)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- self.name = name
- super(DelName, self).__init__(lineno, col_offset, parent)
-
-
-class Name(LookupMixIn, NodeNG):
- """class representing a Name node"""
- _other_fields = ('name',)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- self.name = name
- super(Name, self).__init__(lineno, col_offset, parent)
-
-
-class Arguments(mixins.AssignTypeMixin, NodeNG):
- """class representing an Arguments node"""
- if six.PY3:
- # Python 3.4+ uses a different approach regarding annotations,
- # each argument is a new class, _ast.arg, which exposes an
- # 'annotation' attribute. In astroid though, arguments are exposed
- # as is in the Arguments node and the only way to expose annotations
- # is by using something similar with Python 3.3:
- # - we expose 'varargannotation' and 'kwargannotation' of annotations
- # of varargs and kwargs.
- # - we expose 'annotation', a list with annotations for
- # for each normal argument. If an argument doesn't have an
- # annotation, its value will be None.
-
- _astroid_fields = ('args', 'defaults', 'kwonlyargs',
- 'kw_defaults', 'annotations', 'varargannotation',
- 'kwargannotation', 'kwonlyargs_annotations')
- varargannotation = None
- kwargannotation = None
- else:
- _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults')
- _other_fields = ('vararg', 'kwarg')
-
- def __init__(self, vararg=None, kwarg=None, parent=None):
- super(Arguments, self).__init__(parent=parent)
- self.vararg = vararg
- self.kwarg = kwarg
- self.args = []
- self.defaults = []
- self.kwonlyargs = []
- self.kw_defaults = []
- self.annotations = []
- self.kwonlyargs_annotations = []
-
- def postinit(self, args, defaults, kwonlyargs, kw_defaults,
- annotations,
- kwonlyargs_annotations=None,
- varargannotation=None,
- kwargannotation=None):
- self.args = args
- self.defaults = defaults
- self.kwonlyargs = kwonlyargs
- self.kw_defaults = kw_defaults
- self.annotations = annotations
- self.kwonlyargs_annotations = kwonlyargs_annotations
- self.varargannotation = varargannotation
- self.kwargannotation = kwargannotation
-
- def _infer_name(self, frame, name):
- if self.parent is frame:
- return name
- return None
-
- @decorators.cachedproperty
- def fromlineno(self):
- lineno = super(Arguments, self).fromlineno
- return max(lineno, self.parent.fromlineno or 0)
-
- def format_args(self):
- """return arguments formatted as string"""
- result = []
- if self.args:
- result.append(
- _format_args(self.args, self.defaults,
- getattr(self, 'annotations', None))
- )
- if self.vararg:
- result.append('*%s' % self.vararg)
- if self.kwonlyargs:
- if not self.vararg:
- result.append('*')
- result.append(_format_args(
- self.kwonlyargs,
- self.kw_defaults,
- self.kwonlyargs_annotations
- ))
- if self.kwarg:
- result.append('**%s' % self.kwarg)
- return ', '.join(result)
-
- def default_value(self, argname):
- """return the default value for an argument
-
- :raise `NoDefault`: if there is no default value defined
- """
- i = _find_arg(argname, self.args)[0]
- if i is not None:
- idx = i - (len(self.args) - len(self.defaults))
- if idx >= 0:
- return self.defaults[idx]
- i = _find_arg(argname, self.kwonlyargs)[0]
- if i is not None and self.kw_defaults[i] is not None:
- return self.kw_defaults[i]
- raise exceptions.NoDefault(func=self.parent, name=argname)
-
- def is_argument(self, name):
- """return True if the name is defined in arguments"""
- if name == self.vararg:
- return True
- if name == self.kwarg:
- return True
- return (self.find_argname(name, True)[1] is not None or
- self.kwonlyargs and _find_arg(name, self.kwonlyargs, True)[1] is not None)
-
- def find_argname(self, argname, rec=False):
- """return index and Name node with given name"""
- if self.args: # self.args may be None in some cases (builtin function)
- return _find_arg(argname, self.args, rec)
- return None, None
-
- def get_children(self):
- """override get_children to skip over None elements in kw_defaults"""
- for child in super(Arguments, self).get_children():
- if child is not None:
- yield child
-
-
-def _find_arg(argname, args, rec=False):
- for i, arg in enumerate(args):
- if isinstance(arg, Tuple):
- if rec:
- found = _find_arg(argname, arg.elts)
- if found[0] is not None:
- return found
- elif arg.name == argname:
- return i, arg
- return None, None
-
-
-def _format_args(args, defaults=None, annotations=None):
- values = []
- if args is None:
- return ''
- if annotations is None:
- annotations = []
- if defaults is not None:
- default_offset = len(args) - len(defaults)
- packed = six.moves.zip_longest(args, annotations)
- for i, (arg, annotation) in enumerate(packed):
- if isinstance(arg, Tuple):
- values.append('(%s)' % _format_args(arg.elts))
- else:
- argname = arg.name
- if annotation is not None:
- argname += ':' + annotation.as_string()
- values.append(argname)
-
- if defaults is not None and i >= default_offset:
- if defaults[i-default_offset] is not None:
- values[-1] += '=' + defaults[i-default_offset].as_string()
- return ', '.join(values)
-
-
-class AssignAttr(mixins.ParentAssignTypeMixin, NodeNG):
- """class representing an AssignAttr node"""
- _astroid_fields = ('expr',)
- _other_fields = ('attrname',)
- expr = None
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- self.attrname = attrname
- super(AssignAttr, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- self.expr = expr
-
-
-class Assert(Statement):
- """class representing an Assert node"""
- _astroid_fields = ('test', 'fail',)
- test = None
- fail = None
-
- def postinit(self, test=None, fail=None):
- self.fail = fail
- self.test = test
-
-
-class Assign(mixins.AssignTypeMixin, Statement):
- """class representing an Assign node"""
- _astroid_fields = ('targets', 'value',)
- targets = None
- value = None
-
- def postinit(self, targets=None, value=None):
- self.targets = targets
- self.value = value
-
-
-class AnnAssign(mixins.AssignTypeMixin, Statement):
- """Class representing an AnnAssign node"""
-
- _astroid_fields = ('target', 'annotation', 'value',)
- _other_fields = ('simple',)
- target = None
- annotation = None
- value = None
- simple = None
-
- def postinit(self, target, annotation, simple, value=None):
- self.target = target
- self.annotation = annotation
- self.value = value
- self.simple = simple
-
-
-class AugAssign(mixins.AssignTypeMixin, Statement):
- """class representing an AugAssign node"""
- _astroid_fields = ('target', 'value')
- _other_fields = ('op',)
- target = None
- value = None
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- self.op = op
- super(AugAssign, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, target=None, value=None):
- self.target = target
- self.value = value
-
- # This is set by inference.py
- def _infer_augassign(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Return a list of TypeErrors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadBinaryOperationMessage`,
- which holds the original exception.
- """
- try:
- results = self._infer_augassign(context=context)
- return [result for result in results
- if isinstance(result, util.BadBinaryOperationMessage)]
- except exceptions.InferenceError:
- return []
-
-
-class Repr(NodeNG):
- """class representing a Repr node"""
- _astroid_fields = ('value',)
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class BinOp(NodeNG):
- """class representing a BinOp node"""
- _astroid_fields = ('left', 'right')
- _other_fields = ('op',)
- left = None
- right = None
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- self.op = op
- super(BinOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, left=None, right=None):
- self.left = left
- self.right = right
-
- # This is set by inference.py
- def _infer_binop(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Return a list of TypeErrors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadBinaryOperationMessage`,
- which holds the original exception.
- """
- try:
- results = self._infer_binop(context=context)
- return [result for result in results
- if isinstance(result, util.BadBinaryOperationMessage)]
- except exceptions.InferenceError:
- return []
-
-
-class BoolOp(NodeNG):
- """class representing a BoolOp node"""
- _astroid_fields = ('values',)
- _other_fields = ('op',)
- values = None
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- self.op = op
- super(BoolOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, values=None):
- self.values = values
-
-
-class Break(Statement):
- """class representing a Break node"""
-
-
-class Call(NodeNG):
- """class representing a Call node"""
- _astroid_fields = ('func', 'args', 'keywords')
- func = None
- args = None
- keywords = None
-
- def postinit(self, func=None, args=None, keywords=None):
- self.func = func
- self.args = args
- self.keywords = keywords
-
- @property
- def starargs(self):
- args = self.args or []
- return [arg for arg in args if isinstance(arg, Starred)]
-
- @property
- def kwargs(self):
- keywords = self.keywords or []
- return [keyword for keyword in keywords if keyword.arg is None]
-
-
-class Compare(NodeNG):
- """class representing a Compare node"""
- _astroid_fields = ('left', 'ops',)
- left = None
- ops = None
-
- def postinit(self, left=None, ops=None):
- self.left = left
- self.ops = ops
-
- def get_children(self):
- """override get_children for tuple fields"""
- yield self.left
- for _, comparator in self.ops:
- yield comparator # we don't want the 'op'
-
- def last_child(self):
- """override last_child"""
- # XXX maybe if self.ops:
- return self.ops[-1][1]
- #return self.left
-
-
-class Comprehension(NodeNG):
- """class representing a Comprehension node"""
- _astroid_fields = ('target', 'iter', 'ifs')
- _other_fields = ('is_async',)
- target = None
- iter = None
- ifs = None
- is_async = None
-
- def __init__(self, parent=None):
- super(Comprehension, self).__init__()
- self.parent = parent
-
- # pylint: disable=redefined-builtin; same name as builtin ast module.
- def postinit(self, target=None, iter=None, ifs=None, is_async=None):
- self.target = target
- self.iter = iter
- self.ifs = ifs
- self.is_async = is_async
-
- optional_assign = True
- def assign_type(self):
- return self
-
- def ass_type(self):
- warnings.warn('%s.ass_type() is deprecated and slated for removal'
- 'in astroid 2.0, use %s.assign_type() instead.'
- % (type(self).__name__, type(self).__name__),
- PendingDeprecationWarning, stacklevel=2)
- return self.assign_type()
-
- def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
- """method used in filter_stmts"""
- if self is mystmt:
- if isinstance(lookup_node, (Const, Name)):
- return [lookup_node], True
-
- elif self.statement() is mystmt:
- # original node's statement is the assignment, only keeps
- # current node (gen exp, list comp)
-
- return [node], True
-
- return stmts, False
-
-
-class Const(NodeNG, bases.Instance):
- """represent a constant node like num, str, bool, None, bytes"""
- _other_fields = ('value',)
-
- def __init__(self, value, lineno=None, col_offset=None, parent=None):
- self.value = value
- super(Const, self).__init__(lineno, col_offset, parent)
-
- def getitem(self, index, context=None):
- if isinstance(index, Const):
- index_value = index.value
- elif isinstance(index, Slice):
- index_value = _infer_slice(index, context=context)
-
- else:
- raise exceptions.AstroidTypeError(
- 'Could not use type {} as subscript index'.format(type(index))
- )
-
- try:
- if isinstance(self.value, six.string_types):
- return Const(self.value[index_value])
- if isinstance(self.value, bytes) and six.PY3:
- # Bytes aren't instances of six.string_types
- # on Python 3. Also, indexing them should return
- # integers.
- return Const(self.value[index_value])
- except IndexError as exc:
- util.reraise(exceptions.AstroidIndexError(
- message='Index {index!r} out of range', error=exc,
- node=self, index=index, context=context))
- except TypeError as exc:
- util.reraise(exceptions.AstroidTypeError(
- message='Type error {error!r}', error=exc,
- node=self, index=index, context=context))
-
- raise exceptions.AstroidTypeError(
- '%r (value=%s)' % (self, self.value)
- )
-
- def has_dynamic_getattr(self):
- return False
-
- def itered(self):
- if isinstance(self.value, six.string_types):
- return self.value
- raise TypeError()
-
- def pytype(self):
- return self._proxied.qname()
-
- def bool_value(self):
- return bool(self.value)
-
-
-class Continue(Statement):
- """class representing a Continue node"""
-
-
-class Decorators(NodeNG):
- """class representing a Decorators node"""
- _astroid_fields = ('nodes',)
- nodes = None
-
- def postinit(self, nodes):
- self.nodes = nodes
-
- def scope(self):
- # skip the function node to go directly to the upper level scope
- return self.parent.parent.scope()
-
-
-class DelAttr(mixins.ParentAssignTypeMixin, NodeNG):
- """class representing a DelAttr node"""
- _astroid_fields = ('expr',)
- _other_fields = ('attrname',)
- expr = None
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- self.attrname = attrname
- super(DelAttr, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- self.expr = expr
-
-
-class Delete(mixins.AssignTypeMixin, Statement):
- """class representing a Delete node"""
- _astroid_fields = ('targets',)
- targets = None
-
- def postinit(self, targets=None):
- self.targets = targets
-
-
-class Dict(NodeNG, bases.Instance):
- """class representing a Dict node"""
- _astroid_fields = ('items',)
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- self.items = []
- super(Dict, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, items):
- self.items = items
-
- @classmethod
- def from_constants(cls, items=None):
- node = cls()
- if items is None:
- node.items = []
- else:
- node.items = [(const_factory(k), const_factory(v))
- for k, v in items.items()]
- return node
-
- def pytype(self):
- return '%s.dict' % BUILTINS
-
- def get_children(self):
- """get children of a Dict node"""
- # overrides get_children
- for key, value in self.items:
- yield key
- yield value
-
- def last_child(self):
- """override last_child"""
- if self.items:
- return self.items[-1][1]
- return None
-
- def itered(self):
- return self.items[::2]
-
- def getitem(self, index, context=None):
- for key, value in self.items:
- # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}.
- if isinstance(key, DictUnpack):
- try:
- return value.getitem(index, context)
- except (exceptions.AstroidTypeError, exceptions.AstroidIndexError):
- continue
- for inferredkey in key.infer(context):
- if inferredkey is util.Uninferable:
- continue
- if isinstance(inferredkey, Const) and isinstance(index, Const):
- if inferredkey.value == index.value:
- return value
-
- raise exceptions.AstroidIndexError(index)
-
- def bool_value(self):
- return bool(self.items)
-
-
-class Expr(Statement):
- """class representing a Expr node"""
- _astroid_fields = ('value',)
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class Ellipsis(NodeNG): # pylint: disable=redefined-builtin
- """class representing an Ellipsis node"""
-
- def bool_value(self):
- return True
-
-
-class EmptyNode(NodeNG):
- """class representing an EmptyNode node"""
-
- object = None
-
-
-class ExceptHandler(mixins.AssignTypeMixin, Statement):
- """class representing an ExceptHandler node"""
- _astroid_fields = ('type', 'name', 'body',)
- type = None
- name = None
- body = None
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, type=None, name=None, body=None):
- self.type = type
- self.name = name
- self.body = body
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- if self.name:
- return self.name.tolineno
- elif self.type:
- return self.type.tolineno
-
- return self.lineno
-
- def catch(self, exceptions): # pylint: disable=redefined-outer-name
- if self.type is None or exceptions is None:
- return True
- for node in self.type.nodes_of_class(Name):
- if node.name in exceptions:
- return True
-
-
-class Exec(Statement):
- """class representing an Exec node"""
- _astroid_fields = ('expr', 'globals', 'locals',)
- expr = None
- globals = None
- locals = None
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, expr=None, globals=None, locals=None):
- self.expr = expr
- self.globals = globals
- self.locals = locals
-
-
-class ExtSlice(NodeNG):
- """class representing an ExtSlice node"""
- _astroid_fields = ('dims',)
- dims = None
-
- def postinit(self, dims=None):
- self.dims = dims
-
-
-class For(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement):
- """class representing a For node"""
- _astroid_fields = ('target', 'iter', 'body', 'orelse',)
- target = None
- iter = None
- body = None
- orelse = None
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, target=None, iter=None, body=None, orelse=None):
- self.target = target
- self.iter = iter
- self.body = body
- self.orelse = orelse
-
- optional_assign = True
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- return self.iter.tolineno
-
-
-class AsyncFor(For):
- """Asynchronous For built with `async` keyword."""
-
-
-class Await(NodeNG):
- """Await node for the `await` keyword."""
-
- _astroid_fields = ('value', )
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class ImportFrom(mixins.ImportFromMixin, Statement):
- """class representing a ImportFrom node"""
- _other_fields = ('modname', 'names', 'level')
-
- def __init__(self, fromname, names, level=0, lineno=None,
- col_offset=None, parent=None):
- self.modname = fromname
- self.names = names
- self.level = level
- super(ImportFrom, self).__init__(lineno, col_offset, parent)
-
-
-class Attribute(NodeNG):
- """class representing a Attribute node"""
- _astroid_fields = ('expr',)
- _other_fields = ('attrname',)
- expr = None
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- self.attrname = attrname
- super(Attribute, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- self.expr = expr
-
-
-class Global(Statement):
- """class representing a Global node"""
- _other_fields = ('names',)
-
- def __init__(self, names, lineno=None, col_offset=None, parent=None):
- self.names = names
- super(Global, self).__init__(lineno, col_offset, parent)
-
- def _infer_name(self, frame, name):
- return name
-
-
-class If(mixins.BlockRangeMixIn, Statement):
- """class representing an If node"""
- _astroid_fields = ('test', 'body', 'orelse')
- test = None
- body = None
- orelse = None
-
- def postinit(self, test=None, body=None, orelse=None):
- self.test = test
- self.body = body
- self.orelse = orelse
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- return self.test.tolineno
-
- def block_range(self, lineno):
- """handle block line numbers range for if statements"""
- if lineno == self.body[0].fromlineno:
- return lineno, lineno
- if lineno <= self.body[-1].tolineno:
- return lineno, self.body[-1].tolineno
- return self._elsed_block_range(lineno, self.orelse,
- self.body[0].fromlineno - 1)
-
-
-class IfExp(NodeNG):
- """class representing an IfExp node"""
- _astroid_fields = ('test', 'body', 'orelse')
- test = None
- body = None
- orelse = None
-
- def postinit(self, test=None, body=None, orelse=None):
- self.test = test
- self.body = body
- self.orelse = orelse
-
-
-class Import(mixins.ImportFromMixin, Statement):
- """class representing an Import node"""
- _other_fields = ('names',)
-
- def __init__(self, names=None, lineno=None, col_offset=None, parent=None):
- self.names = names
- super(Import, self).__init__(lineno, col_offset, parent)
-
-
-class Index(NodeNG):
- """class representing an Index node"""
- _astroid_fields = ('value',)
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class Keyword(NodeNG):
- """class representing a Keyword node"""
- _astroid_fields = ('value',)
- _other_fields = ('arg',)
- value = None
-
- def __init__(self, arg=None, lineno=None, col_offset=None, parent=None):
- self.arg = arg
- super(Keyword, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, value=None):
- self.value = value
-
-
-class List(_BaseContainer):
- """class representing a List node"""
- _other_fields = ('ctx',)
-
- def __init__(self, ctx=None, lineno=None,
- col_offset=None, parent=None):
- self.ctx = ctx
- super(List, self).__init__(lineno, col_offset, parent)
-
- def pytype(self):
- return '%s.list' % BUILTINS
-
- def getitem(self, index, context=None):
- return _container_getitem(self, self.elts, index, context=context)
-
-
-class Nonlocal(Statement):
- """class representing a Nonlocal node"""
- _other_fields = ('names',)
-
- def __init__(self, names, lineno=None, col_offset=None, parent=None):
- self.names = names
- super(Nonlocal, self).__init__(lineno, col_offset, parent)
-
- def _infer_name(self, frame, name):
- return name
-
-
-class Pass(Statement):
- """class representing a Pass node"""
-
-
-class Print(Statement):
- """class representing a Print node"""
- _astroid_fields = ('dest', 'values',)
- dest = None
- values = None
-
- def __init__(self, nl=None, lineno=None, col_offset=None, parent=None):
- self.nl = nl
- super(Print, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, dest=None, values=None):
- self.dest = dest
- self.values = values
-
-
-class Raise(Statement):
- """class representing a Raise node"""
- exc = None
- if six.PY2:
- _astroid_fields = ('exc', 'inst', 'tback')
- inst = None
- tback = None
-
- def postinit(self, exc=None, inst=None, tback=None):
- self.exc = exc
- self.inst = inst
- self.tback = tback
- else:
- _astroid_fields = ('exc', 'cause')
- exc = None
- cause = None
-
- def postinit(self, exc=None, cause=None):
- self.exc = exc
- self.cause = cause
-
- def raises_not_implemented(self):
- if not self.exc:
- return
- for name in self.exc.nodes_of_class(Name):
- if name.name == 'NotImplementedError':
- return True
-
-
-class Return(Statement):
- """class representing a Return node"""
- _astroid_fields = ('value',)
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class Set(_BaseContainer):
- """class representing a Set node"""
-
- def pytype(self):
- return '%s.set' % BUILTINS
-
-
-class Slice(NodeNG):
- """class representing a Slice node"""
- _astroid_fields = ('lower', 'upper', 'step')
- lower = None
- upper = None
- step = None
-
- def postinit(self, lower=None, upper=None, step=None):
- self.lower = lower
- self.upper = upper
- self.step = step
-
- def _wrap_attribute(self, attr):
- """Wrap the empty attributes of the Slice in a Const node."""
- if not attr:
- const = const_factory(attr)
- const.parent = self
- return const
- return attr
-
- @decorators.cachedproperty
- def _proxied(self):
- builtins = MANAGER.astroid_cache[BUILTINS]
- return builtins.getattr('slice')[0]
-
- def pytype(self):
- return '%s.slice' % BUILTINS
-
- def igetattr(self, attrname, context=None):
- if attrname == 'start':
- yield self._wrap_attribute(self.lower)
- elif attrname == 'stop':
- yield self._wrap_attribute(self.upper)
- elif attrname == 'step':
- yield self._wrap_attribute(self.step)
- else:
- for value in self.getattr(attrname, context=context):
- yield value
-
- def getattr(self, attrname, context=None):
- return self._proxied.getattr(attrname, context)
-
-
-class Starred(mixins.ParentAssignTypeMixin, NodeNG):
- """class representing a Starred node"""
- _astroid_fields = ('value',)
- _other_fields = ('ctx', )
- value = None
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- self.ctx = ctx
- super(Starred, self).__init__(lineno=lineno,
- col_offset=col_offset, parent=parent)
-
- def postinit(self, value=None):
- self.value = value
-
-
-class Subscript(NodeNG):
- """class representing a Subscript node"""
- _astroid_fields = ('value', 'slice')
- _other_fields = ('ctx', )
- value = None
- slice = None
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- self.ctx = ctx
- super(Subscript, self).__init__(lineno=lineno,
- col_offset=col_offset, parent=parent)
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, value=None, slice=None):
- self.value = value
- self.slice = slice
-
-
-class TryExcept(mixins.BlockRangeMixIn, Statement):
- """class representing a TryExcept node"""
- _astroid_fields = ('body', 'handlers', 'orelse',)
- body = None
- handlers = None
- orelse = None
-
- def postinit(self, body=None, handlers=None, orelse=None):
- self.body = body
- self.handlers = handlers
- self.orelse = orelse
-
- def _infer_name(self, frame, name):
- return name
-
- def block_range(self, lineno):
- """handle block line numbers range for try/except statements"""
- last = None
- for exhandler in self.handlers:
- if exhandler.type and lineno == exhandler.type.fromlineno:
- return lineno, lineno
- if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
- return lineno, exhandler.body[-1].tolineno
- if last is None:
- last = exhandler.body[0].fromlineno - 1
- return self._elsed_block_range(lineno, self.orelse, last)
-
-
-class TryFinally(mixins.BlockRangeMixIn, Statement):
- """class representing a TryFinally node"""
- _astroid_fields = ('body', 'finalbody',)
- body = None
- finalbody = None
-
- def postinit(self, body=None, finalbody=None):
- self.body = body
- self.finalbody = finalbody
-
- def block_range(self, lineno):
- """handle block line numbers range for try/finally statements"""
- child = self.body[0]
- # py2.5 try: except: finally:
- if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno
- and lineno > self.fromlineno and lineno <= child.tolineno):
- return child.block_range(lineno)
- return self._elsed_block_range(lineno, self.finalbody)
-
-
-class Tuple(_BaseContainer):
- """class representing a Tuple node"""
-
- _other_fields = ('ctx',)
-
- def __init__(self, ctx=None, lineno=None,
- col_offset=None, parent=None):
- self.ctx = ctx
- super(Tuple, self).__init__(lineno, col_offset, parent)
-
- def pytype(self):
- return '%s.tuple' % BUILTINS
-
- def getitem(self, index, context=None):
- return _container_getitem(self, self.elts, index, context=context)
-
-
-class UnaryOp(NodeNG):
- """class representing an UnaryOp node"""
- _astroid_fields = ('operand',)
- _other_fields = ('op',)
- operand = None
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- self.op = op
- super(UnaryOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, operand=None):
- self.operand = operand
-
- # This is set by inference.py
- def _infer_unaryop(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Return a list of TypeErrors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadUnaryOperationMessage`,
- which holds the original exception.
- """
- try:
- results = self._infer_unaryop(context=context)
- return [result for result in results
- if isinstance(result, util.BadUnaryOperationMessage)]
- except exceptions.InferenceError:
- return []
-
-
-class While(mixins.BlockRangeMixIn, Statement):
- """class representing a While node"""
- _astroid_fields = ('test', 'body', 'orelse',)
- test = None
- body = None
- orelse = None
-
- def postinit(self, test=None, body=None, orelse=None):
- self.test = test
- self.body = body
- self.orelse = orelse
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- return self.test.tolineno
-
- def block_range(self, lineno):
- """handle block line numbers range for and while statements"""
- return self. _elsed_block_range(lineno, self.orelse)
-
-
-class With(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement):
- """class representing a With node"""
- _astroid_fields = ('items', 'body')
- items = None
- body = None
-
- def postinit(self, items=None, body=None):
- self.items = items
- self.body = body
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- return self.items[-1][0].tolineno
-
- def get_children(self):
- for expr, var in self.items:
- yield expr
- if var:
- yield var
- for elt in self.body:
- yield elt
-
-
-class AsyncWith(With):
- """Asynchronous `with` built with the `async` keyword."""
-
-
-class Yield(NodeNG):
- """class representing a Yield node"""
- _astroid_fields = ('value',)
- value = None
-
- def postinit(self, value=None):
- self.value = value
-
-
-class YieldFrom(Yield):
- """ Class representing a YieldFrom node. """
-
-
-class DictUnpack(NodeNG):
- """Represents the unpacking of dicts into dicts using PEP 448."""
-
-
-class FormattedValue(NodeNG):
- """Represents a PEP 498 format string."""
- _astroid_fields = ('value', 'format_spec')
- value = None
- conversion = None
- format_spec = None
-
- def postinit(self, value, conversion=None, format_spec=None):
- self.value = value
- self.conversion = conversion
- self.format_spec = format_spec
-
-
-class JoinedStr(NodeNG):
- """Represents a list of string expressions to be joined."""
- _astroid_fields = ('values',)
- values = None
-
- def postinit(self, values=None):
- self.values = values
-
-
-class Unknown(NodeNG):
- '''This node represents a node in a constructed AST where
- introspection is not possible. At the moment, it's only used in
- the args attribute of FunctionDef nodes where function signature
- introspection failed.
- '''
- def infer(self, context=None, **kwargs):
- '''Inference on an Unknown node immediately terminates.'''
- yield util.Uninferable
-
-
-# constants ##############################################################
-
-CONST_CLS = {
- list: List,
- tuple: Tuple,
- dict: Dict,
- set: Set,
- type(None): Const,
- type(NotImplemented): Const,
- }
-
-def _update_const_classes():
- """update constant classes, so the keys of CONST_CLS can be reused"""
- klasses = (bool, int, float, complex, str)
- if six.PY2:
- # pylint: disable=undefined-variable
- klasses += (unicode, long)
- klasses += (bytes,)
- for kls in klasses:
- CONST_CLS[kls] = Const
-_update_const_classes()
-
-
-def _two_step_initialization(cls, value):
- instance = cls()
- instance.postinit(value)
- return instance
-
-
-def _dict_initialization(cls, value):
- if isinstance(value, dict):
- value = tuple(value.items())
- return _two_step_initialization(cls, value)
-
-
-_CONST_CLS_CONSTRUCTORS = {
- List: _two_step_initialization,
- Tuple: _two_step_initialization,
- Dict: _dict_initialization,
- Set: _two_step_initialization,
- Const: lambda cls, value: cls(value)
-}
-
-
-def const_factory(value):
- """return an astroid node for a python value"""
- # XXX we should probably be stricter here and only consider stuff in
- # CONST_CLS or do better treatment: in case where value is not in CONST_CLS,
- # we should rather recall the builder on this value than returning an empty
- # node (another option being that const_factory shouldn't be called with something
- # not in CONST_CLS)
- assert not isinstance(value, NodeNG)
-
- # Hack for ignoring elements of a sequence
- # or a mapping, in order to avoid transforming
- # each element to an AST. This is fixed in 2.0
- # and this approach is a temporary hack.
- if isinstance(value, (list, set, tuple, dict)):
- elts = []
- else:
- elts = value
-
- try:
- initializer_cls = CONST_CLS[value.__class__]
- initializer = _CONST_CLS_CONSTRUCTORS[initializer_cls]
- return initializer(initializer_cls, elts)
- except (KeyError, AttributeError):
- node = EmptyNode()
- node.object = value
- return node
-
-
-# Backward-compatibility aliases
-
-Backquote = util.proxy_alias('Backquote', Repr)
-Discard = util.proxy_alias('Discard', Expr)
-AssName = util.proxy_alias('AssName', AssignName)
-AssAttr = util.proxy_alias('AssAttr', AssignAttr)
-Getattr = util.proxy_alias('Getattr', Attribute)
-CallFunc = util.proxy_alias('CallFunc', Call)
-From = util.proxy_alias('From', ImportFrom)
diff --git a/pymode/libs/logilab b/pymode/libs/logilab
new file mode 120000
index 00000000..1100ab45
--- /dev/null
+++ b/pymode/libs/logilab
@@ -0,0 +1 @@
+logilab-common-1.4.1/logilab
\ No newline at end of file
diff --git a/pymode/libs/logilab-common-1.4.1/COPYING b/pymode/libs/logilab-common-1.4.1/COPYING
new file mode 100644
index 00000000..d511905c
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/COPYING
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+ Copyright (C)
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ , 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/pymode/libs/logilab-common-1.4.1/COPYING.LESSER b/pymode/libs/logilab-common-1.4.1/COPYING.LESSER
new file mode 100644
index 00000000..2d2d780e
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/COPYING.LESSER
@@ -0,0 +1,510 @@
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations
+below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it
+becomes a de-facto standard. To achieve this, non-free programs must
+be allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control
+compilation and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at least
+ three years, to give the same user the materials specified in
+ Subsection 6a, above, for a charge no more than the cost of
+ performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply, and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License
+may add an explicit geographical distribution limitation excluding those
+countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms
+of the ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library.
+It is safest to attach them to the start of each source file to most
+effectively convey the exclusion of warranty; and each file should
+have at least the "copyright" line and a pointer to where the full
+notice is found.
+
+
+
+    Copyright (C) <year>  <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or
+your school, if any, to sign a "copyright disclaimer" for the library,
+if necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James
+ Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/pymode/libs/logilab-common-1.4.1/ChangeLog b/pymode/libs/logilab-common-1.4.1/ChangeLog
new file mode 100644
index 00000000..95c96f6a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/ChangeLog
@@ -0,0 +1,1613 @@
+ChangeLog for logilab.common
+============================
+
+2016-10-03 -- 1.3.0
+
+ * pytest: executable deprecated and renamed as logilab-pytest to prevent
+ conflict with pytest provided by http://pytest.org/
+
+2016-03-15 -- 1.2.0
+
+ * pytest: TraceController class, pause_tracing and resume_tracing
+ functions, deprecated from 0.63.1, got removed. The nocoverage
+ and pause_trace utilities are now available from the testlib
+ module rather than pytest.
+
+ * date: datetime2ticks uses the milliseconds from the datetime objects
+
+2015-10-12 -- 1.1.0
+ * configuration: have a stable order for sections (#298658)
+
+ * testlib: clean out deprecated TestCase methods (#1716063), move pytest
+ specifics to pytest.py (#1716053)
+
+ * fix a few python3 bugs in umessage, configuration and optik_ext modules
+
+ * testlib: report failures and skips in generative tests properly
+
+ * optik_ext: return bytes as ints and not floats (#2086835)
+
+2015-07-08 -- 1.0.2
+ * declare setuptools requirement in __pkginfo__/setup.py
+
+ * randomize order of test modules in pytest -t
+
+2015-07-01 -- 1.0.1
+ * restore __pkginfo__.version, which pylint < 1.4.4 uses
+
+2015-06-30 -- 1.0.0
+ * remove unused/deprecated modules: cli, contexts, corbautils, dbf,
+ pyro_ext, xmlrpcutils. __pkginfo__ is no longer installed.
+
+ * major layout change
+
+ * use setuptools exclusively
+
+ * 'logilab' is now a proper namespace package
+
+ * modutils: basic support for namespace packages
+
+ * registry: ambiguous selects now raise a specific exception
+
+ * testlib: better support for non-pytest launchers
+
+ * testlib: Tags() now work with py3k
+
+2014-11-30 -- 0.63.2
+ * fix 2 minor regressions from 0.63.1
+
+2014-11-28 -- 0.63.1
+ * fix fallout from py3k conversion
+
+ * pytest: fix TestSuite.run wrapper (#280806)
+
+ * daemon: change umask after creating pid file
+
+2014-11-05 -- 0.63.0
+ * drop compatibility with python <= 2.5 (#264017)
+
+ * fix textutils.py doctests for py3k
+
+ * produce a clearer exception when dot is not installed (#253516)
+
+ * make source python3-compatible (3.3+), without using 2to3. This
+ introduces a dependency on six (#265740)
+
+ * fix umessage header decoding on python 3.3 and newer (#149345)
+
+ * WARNING: the compat module no longer exports 'callable', 'izip', 'imap',
+ 'chain', 'sum', 'enumerate', 'frozenset', 'reversed', 'sorted', 'max',
+ 'relpath', 'InheritableSet', or any subprocess-related names.
+
+2014-07-30 -- 0.62.1
+ * shellutils: restore py 2.5 compat by removing usage of class decorator
+
+ * pytest: drop broken --coverage option
+
+ * testlib: support for skipping whole test class and conditional skip, don't
+ run setUp for skipped tests
+
+ * configuration: load options in config file order (#185648)
+
+
+
+2014-03-07 -- 0.62.0
+ * modutils: cleanup_sys_modules returns the list of cleaned modules
+
+
+
+2014-02-11 -- 0.61.0
+ * pdf_ext: removed, it had no known users (CVE-2014-1838)
+
+ * shellutils: fix tempfile issue in Execute, and deprecate it
+ (CVE-2014-1839)
+
+ * pytest: use 'env' to run the python interpreter
+
+ * graph: ensure output is ordered on node and graph ids (#202314)
+
+
+
+2013-12-16 -- 0.60.1
+ * modutils:
+
+ * don't propagate IOError when package's __init__.py file doesn't
+ exist (#174606)
+
+ * ensure file is closed, may cause pb depending on the interpreter, eg
+ pypy) (#180876)
+
+ * fix support for `extend_path` based nested namespace packages ;
+ Report and patch by John Johnson (#177651)
+
+ * fix some cases of failing python3 install on windows platform / cross
+ compilation (#180836)
+
+
+
+2013-07-26 -- 0.60.0
+ * configuration: rename option_name method into option_attrname (#140667)
+
+ * deprecation: new DeprecationManager class (closes #108205)
+
+ * modutils:
+
+ - fix typo causing name error in python3 / bad message in python2
+ (#136037)
+ - fix python3.3 crash in file_from_modpath due to implementation
+ change of imp.find_module wrt builtin modules (#137244)
+
+ * testlib: use assertCountEqual instead of assertSameElements/assertItemsEqual
+ (deprecated), fixing crash with python 3.3 (#144526)
+
+ * graph: use codecs.open avoid crash when writing utf-8 data under python3
+ (#155138)
+
+
+
+2013-04-16 -- 0.59.1
+ * graph: added pruning of the recursive search tree for detecting cycles in
+ graphs (closes #2469)
+
+ * testlib: check for generators in with_tempdir (closes #117533)
+
+ * registry:
+
+ - select_or_none should not silent ObjectNotFound exception
+ (closes #119819)
+ - remove 2 accidentally introduced tabs breaking python 3 compat
+ (closes #117580)
+
+ * fix umessages test w/ python 3 and LC_ALL=C (closes #119967, report and
+ patch by Ian Delaney)
+
+
+
+2013-01-21 -- 0.59.0
+ * registry:
+
+ - introduce RegistrableObject base class, mandatory to make
+ classes automatically registrable, and cleanup code
+ accordingly
+ - introduce objid and objname methods on Registry instead of
+ classid function and inlined code plus other refactorings to allow
+ arbitrary objects to be registered, provided they inherit from new
+ RegistrableInstance class (closes #98742)
+ - deprecate usage of leading underscore to skip object registration, using
+ __abstract__ explicitly is better and notion of registered object 'name'
+ is now somewhat fuzzy
+ - use register_all when no registration callback defined (closes #111011)
+
+ * logging_ext: on windows, use colorama to display colored logs, if available (closes #107436)
+
+ * packaging: remove references to ftp at logilab
+
+ * deprecations: really check them
+
+ * packaging: steal spec file from fedora (closes #113099)
+
+ * packaging force python2.6 on rhel5 (closes #113099)
+
+ * packaging Update download and project urls (closes #113099)
+
+ * configuration: enhance merge_options function (closes #113458)
+
+ * decorators: fix @monkeypatch decorator contract for dark corner
+ cases such as monkeypatching of a callable instance: no more
+ turned into an unbound method, which was broken in python 3 and
+ probably not used anywhere (actually closes #104047).
+
+
+
+2012-11-14 -- 0.58.3
+ * date: fix ustrftime() impl. for python3 (closes #82161, patch by Arfrever
+ Frehtes Taifersar Arahesis) and encoding detection for python2 (closes
+ #109740)
+
+ * other python3 code and test fixes (closes #104047)
+
+ * registry: Store.setdefault shouldn't raise RegistryNotFound (closes #111010)
+
+ * table: stop encoding to iso-8859-1, use unicode (closes #105847)
+
+ * setup: properly install additional files during build instead of install (closes #104045)
+
+
+
+2012-07-30 -- 0.58.2
+ * modutils: fixes (closes #100757 and #100935)
+
+
+
+2012-07-17 -- 0.58.1
+    * modutils, testlib: be more python implementation independent (closes #99493 and #99627)
+
+
+
+2012-04-12 -- 0.58.0
+ * new `registry` module containing a backport of CubicWeb selectable objects registry (closes #84654)
+
+ * testlib: DocTestCase fix builtins pollution after doctest execution.
+
+ * shellutil: add argument to ``ProgressBar.update`` to tune cursor progression (closes #88981)
+
+ * deprecated: new DeprecationWrapper class (closes #88942)
+
+
+
+2012-03-22 -- 0.57.2
+    * textutils: apply_units raises ValueError if string isn't valid (closes #88808)
+
+ * daemon: don't call putenv directly
+
+ * pytest: do not enable extra warning other than DeprecationWarning.
+
+ * testlib: DocTestCase fix builtins pollution after doctest execution.
+
+ * testlib: replace sys.exit with raise ImportError (closes: #84159)
+
+ * fix license in README
+
+ * add trove classifiers (tell about python 3 support for pypi)
+
+
+
+2011-10-28 -- 0.57.1
+ * daemon: change $HOME after dropping privileges (closes #81297)
+
+ * compat: method_type for py3k use instance of the class to have a
+ real instance method (closes: #79268)
+
+
+
+2011-10-12 -- 0.57.0
+ * only install unittest2 when python version < 2.7 (closes: #76068)
+
+ * daemon: make pidfile world-readable (closes #75968)
+
+ * daemon: remove unused(?) DaemonMixin class
+
+ * update compat module for callable() and method_type()
+
+ * decorators: fix monkeypatch py3k compat (closes #75290)
+
+ * decorators: provide a @cachedproperty decorator
+
+
+
+2011-09-08 -- 0.56.2
+ * daemon: call initgroups/setgid before setuid (closes #74173)
+
+ * decorators: @monkeypatch should produce a method object (closes #73920)
+
+ * modutils: allow overriding of _getobj by suppressing mangling
+
+
+
+2011-08-05 -- 0.56.1
+ * clcommands: #72450 --rc-file option doesn't work
+
+
+
+2011-06-09 -- 0.56.0
+ * clcommands: make registration possible by class decoration
+
+ * date: new datetime/delta <-> seconds/days conversion function
+
+ * decorators: refactored @cached to allow usages such as
+ @cached(cacheattr='_cachename') while keeping bw compat
+
+
+
+2011-04-01 -- 0.55.2
+ * new function for password generation in shellutils
+
+ * pyro_ext: allow to create a server without registering with a pyrons
+
+
+
+2011-03-28 -- 0.55.1
+ * fix date.ustrftime break if year <= 1900
+
+ * fix graph.py incorrectly builds command lines using %s to call dot
+
+ * new functions to get UTC datetime / time
+
+
+
+2011-02-18 -- 0.55.0
+ * new urllib2ext module providing a GSSAPI authentication handler, based on python-kerberos
+
+ * graph: test and fix ordered_nodes() [closes #60288]
+
+ * changelog: refactor ChangeLog class to ease overriding
+
+ * testlib: Fix tag handling for generator.
+
+
+
+2011-01-12 -- 0.54.0
+ * dropped python 2.3 support
+
+ * daemon: we can now specify umask to daemonize function, and it return
+ different exit code according to the process
+
+ * pyro_ext: new ns_reregister function to ensure a name is still properly
+ registered in the pyro name server
+
+ * hg: new incoming/outgoing functions backward compatible with regards to
+ mercurial version (eg hg 1.6 and earlier)
+
+ * testlib/pytest: more deprecation and removed code. Still on the way to
+ unittest2
+
+
+
+2010-11-15 -- 0.53.0
+ * first python3.x compatible release
+
+ * __init__: tempattr context manager
+
+ * shellutils: progress context manager
+
+
+
+2010-10-11 -- 0.52.1
+ * configuration: fix pb with option names as unicode string w/
+ python 2.5. Makes OptionError available through the module
+
+ * textutils: text_to_dict skip comments (# lines)
+
+ * compat: dropped some 2.2 compat
+
+ * modutils: Consider arch-specific installation for STD_LIB_DIR definition
+
+
+
+2010-09-28 -- 0.52.0
+ * testlib is now based on unittest2, to prepare its own extinction.
+ Warning are printed so you can easily migrate step by step.
+
+    * restored python 2.3 compat in some modules, so one gets a chance to run
+      pylint at least
+
+ * textutils: use NFKD decomposition in unormalize()
+
+ * logging_ext: don't try to use ansi colorized formatter when not in debug
+ mode
+
+
+
+2010-09-10 -- 0.51.1
+ * logging_ext: init_log function splitted into smaller chunk to ease reuse
+ in other contexts
+
+ * clcommands: enhanced/cleaned api, nicer usage display
+
+ * various pylint detected errors fixed
+
+
+
+2010-08-26 -- 0.51.0
+ * testlib: don't raise string exception (closes #35331)
+
+ * hg: new module regrouping some mercurial utility functions
+
+ * clcommands: refactored to get more object oriented api.
+
+ * optparser: module is now deprecated, use clcommands instead
+
+ * textutils: new split_url_or_path and text_to_dict functions
+
+ * logging_ext:
+     - init_log now accepts optionally any arbitrary handler
+ - threshold default to DEBUG if debug flag is true and no threshold specified
+
+    * date: new ustrftime implementation working around datetime limitation on dates < 1900
+
+
+
+2010-06-04 -- 0.50.3
+ * logging: added new optional kw argument to init_log rotating_parameters
+
+ * date: fix nb_open_days() codomain, positive natural numbers are expected
+
+ * configuration:
+ - skip option with no type, avoid pb with generated option such as long-help
+ - handle level on man page generation
+
+
+
+2010-05-21 -- 0.50.2
+ * fix licensing information: LGPL v2.1 or greater
+
+ * daemon: new daemonize function
+
+    * modutils: fix some false negatives of is_standard_module with
+      'from module import something' where something isn't a submodule
+
+ * optik_ext: fix help generation for normal optparse using script if
+ optik_ext has been imported (#24450)
+
+ * textutils support 256 colors when available
+
+    * testlib: add option splitlines to assertTextEquals
+
+
+
+2010-04-26 -- 0.50.1
+ * implements __repr__ on nullobject
+
+ * configuration: avoid crash by skipping option without 'type'
+ entry while input a config
+
+ * pyro_ext: raise PyroError instead of exception
+
+
+
+2010-04-20 -- 0.50.0
+ * graph:
+ - generate methods now takes an optional mapfile argument to generate
+ html image maps
+ - new ordered_nodes function taking a dependency graph dict as arguments
+ and returning an ordered list of nodes
+
+ * configuration:
+ - nicer serialization of bytes / time option
+ - may now contains several option provider with the same name
+ - consider 'level' in option dict, --help displaying only option with level
+ 0, and automatically adding --long-help options for higher levels
+
+ * textutils: case insensitive apply_unit
+
+    * sphinx_ext: new module usable as a sphinx plugin and containing a new
+      'autodocstring' directive
+
+ * ureports: output instead of for strict xhtml compliance
+
+    * decorators: @cached properly copies inner function docstring
+
+
+
+2010-03-16 -- 0.49.0
+ * date: new 'totime' function
+
+ * adbh, db, sqlgen modules moved to the new logilab-database package
+
+ * pytest: when -x option is given, stop on the first error even if
+ there are multiple test directories
+
+
+
+2010-02-26 -- 0.48.1
+ * adbh: added dbport optional argument to [backup|restore]_commands
+
+ * db: fix date processing for SQLServer 2005
+
+ * testlib: improve XML assertion by using ElementTree parser and a new 'context' lines argument
+
+
+
+2010-02-17 -- 0.48.0
+ * date: fixed mx date time compat for date_range (#20651)
+
+ * testlib: generative test should not be interrupted by self.skip() (#20648)
+
+
+
+2010-02-10 -- 0.47.0
+ * adbh: changed backup / restore api (BREAKS COMPAT):
+ - backup_command is now backup_commands (eg return a list of commands)
+ - each command returned in backup_commands/restore_commands may now
+ be list that may be used as argument to subprocess.call, or a string
+ which will the requires a subshell
+ - new sql_rename_col method
+
+ * deprecation: deprecated now takes an optional 'stacklevel' argument, default to 2
+
+ * date: some functions to ease python's datetime module usage have been backported
+ from cubicweb
+
+
+
+2009-12-23 -- 0.46.0
+ * db / adbh: added SQL Server support using Pyodbc
+
+ * db:
+ - New optional extra_args argument to get_connection.
+ - Support Windows Auth for SQLServer by giving
+ extra_args='Trusted_Connection' to the sqlserver2005 driver
+
+
+
+2009-11-23 -- 0.45.2
+ * configuration:
+ - proper bytes and time option types support
+ - make Method usable as 'callback' value
+ - fix #8849 Using plugins, options and .pylintrc crashes PyLint
+
+ * graph: fix has_path returned value to include the destination node, else we get
+ an empty list which makes think there is no path (test added)
+
+
+
+2009-08-26 -- 0.45.0
+ * added function for parsing XML processing instructions
+
+
+
+2009-08-07 -- 0.44.0
+ * remove code deprecated for a while now
+
+ * shellutils: replace confirm function by RawInput class /ASK singleton
+
+ * deprecation: new deprecated decorator, replacing both obsolete and deprecated_function
+
+
+
+2009-07-21 -- 0.43.0
+ * dbf: a DBF reader which reads Visual Fox Pro DBF format with Memo field (module from Yusdi Santoso)
+
+ * shellutils:
+ - #9764 add title to shellutils.ProgressBar
+ - #9796 new confirm function
+
+ * testlib:
+ - simplify traceback manipulation (skip first frames corresponding to testlib functions)
+ - -c now captures DeprecationWarnings
+
+ * sphinxutils: simplified API
+
+ * modutils: new cleanup_sys_modules function that removes modules under a list
+ of directories from sys.modules
+
+
+
+2009-07-17 -- 0.42.0
+ * pyro_ext: new module for pyro utilities
+
+ * adbh: fix default set_null_allowed implementation, new case_sensitive
+ resource descriptor
+
+
+
+2009-06-03 -- 0.41.0
+ * modutils: new extrapath argument to modpath_from_file (see function's
+ docstring for explanation)
+
+ * adbh: new alter_column_support flag, sql_set_null_allowed and
+ sql_change_col_type methods
+
+
+
+2009-05-28 -- 0.40.1
+ * date: handle both mx.DateTime and datetime representations
+
+ * db: use sqlite native module's Binary, not StringIO
+
+
+
+2009-05-14 -- 0.40.0
+ * python < 2.3 are now officially unsupported
+
+ * #9162: new module with some sphinx utilities
+
+ * #9166: use a global variable to control mx datetime / py datetime usage
+
+ * db: add time adapter for pysqlite2, fix mysql bool and string handling
+
+ * configuration: don't print default for store_true / store_false option
+ or option with None as default
+
+
+
+2009-04-07 -- 0.39.1
+ * fix #6760 umessage.decode_QP() crashes on unknown encoding
+
+
+
+2009-03-25 -- 0.39.0
+ * fix #7915 (shellutils unusable under windows)
+
+ * testlib:
+
+ * new profile option using cProfile
+
+ * allows to skip a module by raising TestSkipped from module import
+
+ * modutils: locate modules in zip/egg archive
+
+ * db: USE_MX_DATETIME global to control usage of mx.DateTime / py datetime
+
+
+
+2009-01-26 -- 0.38.0
+ * setuptools / easy_install support!
+
+ * removed some old backward compat code
+
+ * adbh: new intersect_all_support attribute
+
+ * contexts: new pushd context manager
+
+ * shellutils: enhance acquire_lock method w/ race condition
+
+ * configuration: fix case sensitivity pb w/ config file sections
+
+ * pytest: reimplemented colorization
+
+
+
+2009-01-08 -- 0.37.2
+ * configuration: encoding handling for configuration file generation
+
+ * adbh: fix Datetime type map for mysql
+
+ * logging_ext: drop lldebug level which shouldn't be there
+
+
+
+2008-12-11 -- 0.37.1
+ * contexts: make the module syntactically correct wrt python2.4
+
+
+
+2008-12-09 -- 0.37.0
+ * contexts: new module for context managers, keeping py <2.4 syntax compat
+ for distribution (only `tempdir` cm for now)
+
+ * tasksqueue: new module containing a class to handle prioritized tasks queue
+
+ * proc: new module for process information / resource control
+
+ * optik_ext: new time/bytes option types, using textutils conversion function
+
+ * logging_ext: new set_log_methods / init_log utility functions
+
+
+
+2008-10-30 -- 0.36.0
+ * configuration:
+ - option yn is now behaving like a flag (i.e --ex : if ex.default=True and --ex in sys.args then ex.value=False)
+ - new attribute hide in option (i.e --ex : if --ex has 'hide':True then the option will not be displayed in man or --help)
+
+ * pytest:
+ - add colors in display
+ - new option --restart that skips tests that succeeded on last run
+
+    * cache: now inherits from dict class
+
+ * decorators: add @require_version @require_module that skip test if decorators are not satisfied
+
+
+
+2008-10-09 -- 0.35.3
+ * graph: new has_path method
+
+
+
+2008-10-01 -- 0.35.2
+ * configuration:
+ - fix #6011: lgc.configuration ignore customized option values
+ - fix #3278: man page generation broken
+
+ * dropped context.py module which broke the debian package when
+ some python <2.5 is installed (#5979)
+
+
+
+2008-09-10 -- 0.35.0
+ * fix #5945: wrong edge properties in graph.DotBackend
+
+ * testlib: filter tests with tag decorator
+
+ * shellutils: new simple unzip function
+
+
+
+2008-08-07 -- 0.34.0
+ * changelog: properly adds new line at the end of each entry
+
+ * testlib: add a with_tempdir decorator ensuring all temporary files and dirs are removed
+
+    * graph: improve DotBackend configuration. graphviz renderer can now be selected
+      and additional graph parameter used
+
+ * db: support of Decimal Type
+
+
+
+2008-06-25 -- 0.33.0
+ * decorators: new @locked decorator
+
+ * cache: make it thread safe, changed behaviour so that when cache size is 0
+ and __delitem__ is called, a KeyError is raised (more consistent)
+
+ * testlib:
+ - added assertIsNot, assertNone and assertNotNone assertion
+ - added assertUnorderedIterableEquals
+ - added assertDirEquals
+ - various failure output improvement
+
+ * umessage: umessage.date() may return unparsable string as is instead of None
+
+ * compat: adds a max function taking 'key' as keyword argument as in 2.5
+
+ * configuration: escape rest when printing for default value
+
+
+
+2008-06-08 -- 0.32.0
+ * textutils: add the apply_unit function
+
+ * testlib:
+ - added a assertXMLEqualsTuple test assertion
+ - added a assertIs assertion
+
+
+
+2008-05-08 -- 0.31.0
+ * improved documentation and error messages
+
+ * testlib: support a msg argument on more assertions, pysqlite2 as default
+
+ * pytest: pytestconf.py for customization
+
+
+
+2008-03-26 -- 0.30.0
+ * db: remember logged user on the connection
+
+ * clcommands: commands may be hidden (e.g. not displayed in help), generic
+ ListCommandsCommand useful to build bash completion helpers
+
+ * changelog: module to parse ChangeLog file as this one, backported from
+ logilab.devtools
+
+
+
+2008-03-12 -- 0.29.1
+ * date: new nb_open_days function counting worked days between two date
+
+ * adbh: add -p option to mysql commands to ask for password
+
+
+
+2008-03-05 -- 0.29.0
+ * adbh: mysql doesn't support ILIKE, implement list_indices for mysql
+
+ * db: mysql adapter use mx DateTime when available, fix unicode handling
+
+
+
+2008-02-18 -- 0.28.2
+ * testlib: restore python2.3 compatibility
+
+
+
+2008-02-15 -- 0.28.1
+ * testlib: introduce InnerTest class to name generative tests, fix
+ generative tests description storage
+
+ * pytest: fix -s option
+
+ * modutils: included Stefan Rank's patch to deal with 2.4 relative import
+
+ * configuration: don't give option's keywords not recognized by optparse,
+ fix merge_options function
+
+
+
+2008-02-05 -- 0.28.0
+ * date: new `add_days_worked` function
+
+ * shellutils: new `chown` function
+
+ * testlib: new `strict` argument to assertIsInstance
+
+ * __init__: new `attrdict` and `nullobject` classes
+
+
+
+2008-01-25 -- 0.27.0
+ * deprecation: new class_moved utility function
+
+ * interface: fix subinterface handling
+
+
+
+2008-01-10 -- 0.26.1
+ * optparser: support --version at main command level
+
+ * testlib: added man page for pytest
+
+ * textutils: fix a bug in normalize{_,_rest_}paragraph which may cause
+ infinite loop if an indent string containing some spaces is given
+
+
+
+2008-01-07 -- 0.26.0
+ * db: binarywrap support
+
+ * modutils: new LazyObject class
+
+
+
+2007-12-20 -- 0.25.2
+ * adbh: new needs_from_clause variable on db helper
+
+
+
+2007-12-11 -- 0.25.1
+ * pytest: new --profile option, setup module / teardown module hook,
+ other fixes and enhancements
+
+ * db: mysql support fixes
+
+ * adbh: fix postgres list_indices implementation
+
+
+
+2007-11-26 -- 0.25.0
+ * adbh:
+ - list_tables implementation for sqlite
+ - new list_indices, create_index, drop_index methods
+
+ * restore python < 2.4 compat
+
+
+
+2007-10-29 -- 0.24.0
+ * decorators: new classproperty decorator
+
+ * adbh: new module containing advanced db helper which were in the "db"
+ module, with additional registered procedures handling
+
+
+
+2007-10-23 -- 0.23.1
+ * modutils: fix load_module_from_* (even with use_sys=False, it should
+ try to get outer packages from sys.modules)
+
+
+
+2007-10-17 -- 0.23.0
+ * db:
+
+ - mark support_users and support_groups methods as obsolete in
+ favor of users_support and groups_support attributes
+ - new ilike_support property on dbms helpers
+ - extended db helper api
+ - completed mysql support
+
+ * textutils: new unormalize function to normalize diacritical chars by
+ their ascii equivalent
+
+ * modutils: new load_module_from_file shortcut function
+
+ * clcommands: pop_args accept None as value for expected_size_after,
+ meaning remaining args should not be checked
+
+ * interface: new extend function to dynamically add an implemented interface
+ to a new style class
+
+
+
+2007-06-25 -- 0.22.2
+ * new 'typechanged' action for configuration.read_old_config
+
+
+
+2007-05-14 -- 0.22.1
+ * important bug fix in db.py
+
+ * added history in pytest debugger sessions
+
+ * fix pytest coverage bug
+
+ * fix textutils test
+
+ * fix a bug which provoked a crash if devtools was not installed
+
+
+
+2007-05-14 -- 0.22.0
+ * pytest improvements
+
+ * shellutils: use shutil.move instead of os.rename as default action
+ of mv
+
+ * db: new `list_users` and `sql_drop_unique_constraint` methods on
+ advanced helpers
+
+ * deprecation: new `obsolete` decorator
+
+
+
+2007-02-12 -- 0.21.3
+ * fixed cached decorator to use __dict__ instead of attribute lookup,
+ avoiding potential bugs with inheritance when using cached class
+ methods
+
+
+
+2007-02-05 -- 0.21.2
+ * fix ReST normalization (#3471)
+
+
+
+2006-12-19 -- 0.21.1
+ * tree: make Node iterable (iter on its children)
+
+ * configuration: fix #3197 (OptionsManagerMixin __init__ isn't passing
+ correctly its "version" argument)
+
+ * textutils: new 'rest' argument to normalize_text to better deal with
+ ReST formated text
+
+ * some packaging fixes
+
+
+
+2006-11-14 -- 0.21.0
+ * db:
+
+ - new optional keepownership argument to backup|restore_database methods
+ - only register mxDatetime converters on psycopg2 adapter if
+ mx.DateTime is available
+
+ * moved some stuff which was in common __init__ file into specific
+ module. At this occasion new "decorators" and "deprecation" modules
+ has been added
+
+ * deprecated fileutils.[files_by_ext,include_files_by_ext,exclude_files_by_ext]
+ functions in favor of new function shellutils.find
+
+ * mark the following modules for deprecation, they will be removed in a
+ near version:
+
+ * astutils: moved to astng
+
+ * bind (never been used)
+
+ * html: deprecated
+
+ * logger/logservice: use logging module
+
+ * monclient/monserver (not used anymore)
+
+ * patricia (never been used)
+
+ * twisted_distutils (not used anymore)
+
+ * removed the following functions/methods which have been deprecated for a
+ while now:
+
+ * modutils.load_module_from_parts
+
+ * textutils.searchall
+
+ * tree.Node.leafs
+
+ * fileutils.get_by_ext, filetutils.get_mode, fileutils.ensure_mode
+
+ * umessage: more robust charset handling
+
+
+
+2006-11-03 -- 0.20.2
+ * fileutils: new remove_dead_links function
+
+ * date: add missing strptime import
+
+
+
+2006-11-01 -- 0.20.1
+ * umessage:
+ - new message_from_string function
+ - fixed get_payload encoding bug
+
+ * db: default postgres module is now psycopg2, which has been customized
+ to return mx.Datetime objects for date/time related types
+
+
+
+2006-10-27 -- 0.20.0
+ * db:
+ - fixed date handling
+ - new methods on advanced helper to generate backup commands
+
+ * configuration: basic deprecated config handling support
+
+ * new implementation of pytest
+
+ * backport a dot backend from yams into a new "graph" module
+
+
+
+2006-10-03 -- 0.19.3
+ * fixed bug in textutils.normalise_[text|paragraph] with unsplitable
+ word larger than the maximum line size
+
+ * added pytest.bat for windows installation
+
+ * changed configuration.generate_config to include None values into the
+ generated file
+
+
+
+2006-09-25 -- 0.19.2
+ * testlib:
+     - fixed a bug in find_test making it return some bad test names
+ - new assertIsInstance method on TestCase
+
+ * optik_ext: make it works if mx.DateTime is not installed, in which case
+ the date type option won't be available
+
+ * test fixes
+
+
+
+2006-09-22 -- 0.19.1
+ * db:
+
+ - fixed bug when querying boolean on sqlite using python's bool type
+ - fixed time handling and added an adapter for DateTimeDeltaType
+ - added "drop_on_commit" argument to create_temporary_table on db helper
+ - added missing implementation of executemany on pysqlite2 wrapper to
+ support pyargs correctly like execute
+
+ * optik_ext: fixed "named" type option to support csv values and to return
+ a dictionary
+
+
+
+2006-09-05 -- 0.19.0
+ * new umessage module which provides a class similar to the standard
+ email.Message class but returning unicode strings
+
+ * new clcommands module to handle commands based command line tool
+ (based on the configuration module)
+
+ * new "date" option type in optik_ext
+
+ * new AttrObject in testlib to create objects in test with arbitrary attributes
+
+ * add pytest to run project's tests and get rid of all runtests.py
+
+ * add pytest option to enable design-by-contract using aspects
+
+ * some enhancements to the configuration module
+
+
+
+2006-08-09 -- 0.18.0
+ * added -c / --capture option to testlib.unittest_main
+
+ * fixed bugs in lgc.configuration
+
+ * optparser: added a OptionParser that extends optparse's with commands
+
+
+
+2006-07-13 -- 0.17.0
+ * python2.5 compatibility (testlib.py + compat.py)
+
+ * testlib.assertListEquals return all errors at once
+
+ * new "password" option type in optik_ext
+
+ * configuration: refactored to support interactive input of a configuration
+
+
+
+2006-06-08 -- 0.16.1
+ * testlib: improved test collections
+
+ * compat: added cmp argument to sorted
+
+
+
+2006-05-19 -- 0.16.0
+ * testlib:
+
+ - added a set of command line options (PYDEBUG is deprecated,
+ use the -i/--pdb option, and added -x/--exitfirst option)
+ - added support for generative tests
+
+ * db:
+ - fix get_connection parameter order and host/port handling
+ - added .sql_temporary_table method to advanced func helpers
+ - started a psycopg2 adapter
+
+ * configuration: enhanced to handle default value in help and man pages
+ generation (require python >= 2.4)
+
+
+
+2006-04-25 -- 0.15.1
+ * db: add missing port handling to get_connection function and
+ dbapimodule.connect methods
+
+ * testlib: various fixes and minor improvements
+
+
+
+2006-03-28 -- 0.15.0
+ * added "cached" decorator and a simple text progression bar into __init__
+
+ * added a simple text progress bar into __init__
+
+ * configuration: fixed man page generation when using python 2.4
+
+ * db: added pysqllite2 support, preconfigured to handle timestamp using
+ mxDatetime and to correctly handle boolean types
+
+
+
+2006-03-06 -- 0.14.1
+ * backported file support and add LOG_CRIT to builtin in logservice module
+
+
+
+2006-02-28 -- 0.14.0
+ * renamed assertXML*Valid to assertXML*WellFormed and deprecated the old name
+
+ * fixed modutils.load_module_from_*
+
+
+
+2006-02-03 -- 0.13.1
+ * fix some tests, patch contributed by Marien Zwart
+
+ * added ability to log into a file with make_logger()
+
+
+
+2006-01-06 -- 0.13.0
+ * testlib: ability to skip a test
+
+ * configuration:
+
+ - cleaner configuration file generation
+ - refactoring so that we can have more control on file
+ configuration loading using read_config_file and load_config_file
+ instead of load_file_configuration
+
+ * modutils: fix is_relative to return False when from_file is a file
+ located somewhere in sys.path
+
+ * ureport: new "escaped" attribute on Text nodes, controling html escaping
+
+ * compat: make set iterable and support more other set operations...
+
+ * removed the astng sub-package, since it's now self-distributed as
+ logilab-astng
+
+
+
+2005-09-06 -- 0.12.0
+ * shellutils: bug fix in mv()
+
+ * compat:
+ - use set when available
+ - added sorted and reversed
+
+ * table: new methods and some optimizations
+
+ * tree: added some deprecation warnings
+
+
+
+2005-07-25 -- 0.11.0
+ * db: refactoring, added sqlite support, new helpers to support DBMS
+ specific features
+
+
+
+2005-07-07 -- 0.10.1
+ * configuration: added basic man page generation feature
+
+ * ureports: unicode handling, some minor fixes
+
+ * testlib: enhance MockConnection
+
+ * python2.2 related fixes in configuration and astng
+
+
+
+2005-05-04 -- 0.10.0
+ * astng: improve unit tests coverage
+
+ * astng.astng: fix Function.format_args, new method
+ Function.default_value, bug fix in Node.resolve
+
+ * astng.builder: handle classmethod and staticmethod as decorator,
+ handle data descriptors when building from living objects
+
+ * ureports:
+ - new docbook formatter
+ - handle ReST like urls in the text writer
+ - new build_summary utility function
+
+
+
+2005-04-14 -- 0.9.3
+ * optik_ext: add man page generation based on optik/optparse options
+ definition
+
+ * modutils: new arguments to get_source_file to handle files without
+ extensions
+
+ * astng: fix problem with the manager and python 2.2 (optik related)
+
+
+
+2005-02-16 -- 0.9.2
+ * textutils:
+
+ - added epydoc documentation
+ - new sep argument to the get_csv function
+ - fix pb with normalize_* functions on windows platforms
+
+ * fileutils:
+
+ - added epydoc documentation
+ - fixed bug in get_by_ext (renamed files_by_ext) with the
+ exclude_dirs argument
+
+ * configuration:
+ - fixed a bug in configuration file generation on windows platforms
+ - better test coverage
+
+ * fixed testlib.DocTest which wasn't working anymore with recent
+ versions of pyunit
+
+ * added "context_file" argument to file_from_modpath to avoid
+ possible relative import problems
+
+ * astng: use the new context_file argument from Node.resolve()
+
+
+
+2005-02-04 -- 0.9.1
+ * astng:
+
+ - remove buggy print
+ - fixed builder to deal with builtin methods
+ - fixed raw_building.build_function with python 2.4
+
+ * modutils: code cleanup, some reimplementation based on "imp",
+ better handling of windows specific extensions, epydoc documentation
+
+ * fileutils: new exclude_dirs argument to the get_by_ext function
+
+ * testlib: main() support -p option to run test in a profiled mode
+
+ * generated documentation for modutils in the doc/ subdirectory
+
+
+
+2005-01-20 -- 0.9.0
+ * astng:
+
+ - refactoring of some huge methods
+ - fix interface resolving when __implements__ is defined in a parent
+ class in another module
+ - add special code in the builder to fix problem with qt
+ - new source_line method on Node
+ - fix sys.path during parsing to avoid some failure when trying
+ to get imported names by `from module import *`, and use an astng
+ building instead of exec'ing the statement
+ - fix possible AttributeError with Function.type
+ - manager.astng_from_file fallback to astng_from_module if possible
+
+ * textutils: fix bug in normalize_paragraph, unquote handle empty string
+ correctly
+
+ * modutils:
+
+ - use a cache in has_module to speed up things when heavily used
+ - fix file_from_modpath to handle pyxml and os.path
+
+ * configuration: fix problem with serialization/deserialization of empty
+ string
+
+
+
+2005-01-04 -- 0.8.0
+ * modutils: a lot of fixes/rewrite on various functions to avoid
+ unnecessary imports, sys.path pollution, and other bugs (notably
+ making pylint reporting wrong modules name/path)
+
+ * astng: new "inspector" module, initially taken from pyreverse code
+ (http://www.logilab.org/projects/pyreverse), miscellaneous bug fixes
+
+ * configuration: new 'usage' parameter on the Configuration
+ initializer
+
+ * logger: unicode support
+
+ * fileutils: get_by_ext also ignore ".svn" directories, not only "CVS"
+
+
+
+2004-11-03 -- 0.7.1
+ * astng:
+
+ - don't raise a syntax error on files missing a trailing \n.
+ - fix utils.is_abstract (was causing an unexpected exception if a
+ string exception was raised).
+ - fix utils.get_implemented.
+ - fix file based manager's cache problem.
+
+ * textutils: fixed normalize_text / normalize_paragraph functions
+
+
+
+2004-10-11 -- 0.7.0
+ * astng: new methods on the manager, returning astng with nodes for
+ packages (i.e. recursive structure instead of the flat one), with
+ automatic lazy loading + introduction of a dict like interface to
+ manipulate those nodes and Module, Class and Function nodes.
+
+ * logservice: module imported from the ginco project
+
+ * configuration: added new classes Configuration and
+ OptionsManager2Configuration adapter, fix bug in loading options
+ from file
+
+ * optik_ext/configuration: some new option type "multiple_choice"
+
+ * fileutils: new ensure_mode function
+
+ * compat: support for sum and enumerate
+
+
+
+2004-09-23 -- 0.6.0
+ * db: added DBAPIAdapter
+
+	* textutils: fix in pretty_match causing malformed messages in pylint
+ added ansi colorization management
+
+ * modutils: new functions get_module_files, has_module and file_from_modpath
+
+ * astng: some new utility functions taken from pylint, minor changes to the
+ manager API, Node.resolve doesn't support anymore "living" resolution,
+ some new methods on astng nodes
+
+ * compat: new module for a transparent compatibility layer between
+ different python version (actually 2.2 vs 2.3 for now)
+
+
+
+2004-07-08 -- 0.5.2
+ * astng: fix another bug in klassnode.ancestors() method...
+
+ * db: fix mysql access
+
+ * cli: added a space after the prompt
+
+
+
+2004-06-04 -- 0.5.1
+ * astng: fix undefined var bug in klassnode.ancestors() method
+
+ * ureports: fix attributes on title layout
+
+	* packaging: fix the setup.py script to allow bdist_wininst (well, the
+ generated installer has not been tested...) with the necessary
+ logilab/__init__.py file
+
+
+
+2004-05-10 -- 0.5.0
+ * ureports: new Universal Reports sub-package
+
+ * xmlrpcutils: new xmlrpc utilities module
+
+ * astng: resolve(name) now handle (at least try) builtins
+
+ * astng: fixed Class.as_string (empty parent when no base classes)
+
+ * astng.builder: knows a little about method descriptors, Function with
+ unknown arguments have argnames==None.
+
+ * fileutils: new is_binary(filename) function
+
+ * textutils: fixed some Windows bug
+
+	* tree: base node doesn't have the "title" attribute anymore
+
+ * testlib: removed the spawn function (who used that ?!), added MockSMTP,
+ MockConfigParser, MockConnexion and DocTestCase (test class for
+ modules embedding doctest). All mocks objects are very basic and will be
+ enhanced as the need comes.
+
+ * testlib: added a TestCase class with some additional methods then
+ the regular unittest.TestCase class
+
+	* cli: allow specifying a command prefix by a class attributes, more
+ robust, print available commands on help
+
+ * db: new "binary" function to get the binary wrapper for a given driver,
+ and new "system_database" function returning the system database name
+ for different DBMS.
+
+ * configuration: better group control
+
+
+
+2004-02-20 -- 0.4.5
+ * db: it's now possible to fix the modules search order. By default call
+ set_isolation_level if psycopg is used
+
+
+
+2004-02-17 -- 0.4.4
+ * modutils: special case for os.path in get_module_part
+
+ * astng: handle special case where we are on a package node importing a module
+ using the same name as the package, which may end in an infinite loop
+ on relative imports in Node.resolve
+
+ * fileutils: new get_by_ext function
+
+
+
+2004-02-11 -- 0.4.3
+ * astng: refactoring of Class.ancestor_for_* methods (now
+ depends on python 2.2 generators)
+
+ * astng: make it more robust
+
+ * configuration: more explicit exception when a bad option is
+ provided
+
+ * configuration: define a short version of an option using the "short"
+ keyword, taking a single letter as value
+
+ * configuration: new method global_set_option on the manager
+
+ * testlib : allow no "suite" nor "Run" function in test modules
+
+ * shellutils: fix bug in *mv*
+
+
+
+2003-12-23 -- 0.4.2
+ * added Project class and some new methods to the ASTNGManger
+
+ * some new functions in astng.utils
+
+ * fixed bugs in some as_string methods
+
+ * fixed bug in textutils.get_csv
+
+ * fileutils.lines now take a "comments" argument, allowing to ignore
+ comment lines
+
+
+
+2003-11-24 -- 0.4.1
+ * added missing as_string methods on astng nodes
+
+ * bug fixes on Node.resolve
+
+ * minor fixes in textutils and fileutils
+
+ * better test coverage (need more !)
+
+
+
+2003-11-13 -- 0.4.0
+ * new textutils and shellutils modules
+
+ * full astng rewrite, now based on the compiler.ast package from the
+ standard library
+
+ * added next_sbling and previous_sibling methods to Node
+
+ * fix get_cycles
+
+
+
+2003-10-14 -- 0.3.5
+ * fixed null size cache bug
+
+ * added 'sort_by_column*' methods for tables
+
+
+
+2003-10-08 -- 0.3.4
+	* fix bug in astng, occurring with python2.3 and modules including an
+ encoding declaration
+
+ * fix bug in astutils.get_rhs_consumed_names, occurring in lists
+ comprehension
+
+ * remove debug print statement from configuration.py which caused a
+ generation of incorrect configuration files.
+
+
+
+2003-10-01 -- 0.3.3
+ * fix bug in modutils.modpath_from_file
+
+ * new module corbautils
+
+
+
+2003-09-18 -- 0.3.2
+ * fix bug in modutils.load_module_from_parts
+
+ * add missing __future__ imports
+
+
+
+2003-09-18 -- 0.3.1
+ * change implementation of modutils.load_module_from_name (use find_module
+ and load_module instead of __import__)
+
+ * more bug fixes in astng
+
+ * new functions in fileutils (lines, export) and __init__ (Execute)
+
+
+
+2003-09-12 -- 0.3
+ * expect "def suite" or "def Run(runner=None)" on unittest module
+
+ * fixes in modutils
+
+ * major fixes in astng
+
+ * new fileutils and astutils modules
+
+ * enhancement of the configuration module
+
+	* new option type "named" in the optik_ext module
+
+
+
+2003-06-18 -- 0.2.2
+ * astng bug fixes
+
+
+
+2003-06-04 -- 0.2.1
+ * bug fixes
+
+ * fix packaging problem
+
+
+
+2003-06-02 -- 0.2.0
+ * add the interface, modutils, optik_ext and configuration modules
+
+ * add the astng sub-package
+
+ * miscellaneous fixes
+
+
+
+2003-04-17 -- 0.1.2
+ * add the stringio module
+
+ * minor fixes
+
+
+
+2003-02-28 -- 0.1.1
+ * fix bug in tree.py
+
+ * new file distutils_twisted
+
+
+
+2003-02-17 -- 0.1.0
+ * initial revision
+
+
+
diff --git a/pymode/libs/logilab-common-1.4.1/MANIFEST.in b/pymode/libs/logilab-common-1.4.1/MANIFEST.in
new file mode 100644
index 00000000..faee190f
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/MANIFEST.in
@@ -0,0 +1,14 @@
+include ChangeLog
+include README*
+include COPYING
+include COPYING.LESSER
+include bin/logilab-pytest
+include bin/logilab-pytest.bat
+include test/data/ChangeLog
+recursive-include test *.py *.txt *.msg *.ini *.zip *.egg
+recursive-include test/data/*_dir *
+recursive-include test/input *.py
+recursive-include doc/html *
+include doc/logilab-pytest.1
+include doc/makefile
+include __pkginfo__.py
diff --git a/pymode/libs/logilab-common-1.4.1/PKG-INFO b/pymode/libs/logilab-common-1.4.1/PKG-INFO
new file mode 100644
index 00000000..9dca2cdd
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/PKG-INFO
@@ -0,0 +1,164 @@
+Metadata-Version: 1.1
+Name: logilab-common
+Version: 1.4.1
+Summary: collection of low-level Python packages and modules used by Logilab projects
+Home-page: http://www.logilab.org/project/logilab-common
+Author: Logilab
+Author-email: contact@logilab.fr
+License: LGPL
+Description: Logilab's common library
+ ========================
+
+ What's this ?
+ -------------
+
+ This package contains some modules used by different Logilab projects.
+
+ It is released under the GNU Lesser General Public License.
+
+ There is no documentation available yet but the source code should be clean and
+ well documented.
+
+ Designed to ease:
+
+ * handling command line options and configuration files
+ * writing interactive command line tools
+ * manipulation of files and character strings
+ * manipulation of common structures such as graph, tree, and pattern such as visitor
+ * generating text and HTML reports
+ * more...
+
+
+ Installation
+ ------------
+
+ Extract the tarball, jump into the created directory and run ::
+
+ python setup.py install
+
+ For installation options, see ::
+
+ python setup.py install --help
+
+
+ Provided modules
+ ----------------
+
+ Here is a brief description of the available modules.
+
+ Modules providing high-level features
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ * `cache`, a cache implementation with a least recently used algorithm.
+
+ * `changelog`, a tiny library to manipulate our simplified ChangeLog file format.
+
+ * `clcommands`, high-level classes to define command line programs handling
+ different subcommands. It is based on `configuration` to get easy command line
+ / configuration file handling.
+
+ * `configuration`, some classes to handle unified configuration from both
+ command line (using optparse) and configuration file (using ConfigParser).
+
+ * `proc`, interface to Linux /proc.
+
+ * `umessage`, unicode email support.
+
+ * `ureports`, micro-reports, a way to create simple reports using python objects
+ without care of the final formatting. ReST and html formatters are provided.
+
+
+ Modules providing low-level functions and structures
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ * `compat`, provides a transparent compatibility layer between different python
+ versions.
+
+ * `date`, a set of date manipulation functions.
+
+ * `daemon`, a daemon function and mix-in class to properly start an Unix daemon
+ process.
+
+ * `decorators`, function decorators such as cached, timed...
+
+ * `deprecation`, decorator, metaclass & all to mark functions / classes as
+ deprecated or moved
+
+ * `fileutils`, some file / file path manipulation utilities.
+
+ * `graph`, graph manipulations functions such as cycle detection, bases for dot
+ file generation.
+
+ * `modutils`, python module manipulation functions.
+
+ * `shellutils`, some powerful shell like functions to replace shell scripts with
+ python scripts.
+
+ * `tasksqueue`, a prioritized tasks queue implementation.
+
+ * `textutils`, some text manipulation functions (ansi colorization, line wrapping,
+ rest support...).
+
+ * `tree`, base class to represent tree structure, and some others to make it
+ works with the visitor implementation (see below).
+
+ * `visitor`, a generic visitor pattern implementation.
+
+
+ Modules extending some standard modules
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ * `debugger`, `pdb` customization.
+
+ * `logging_ext`, extensions to `logging` module such as a colorized formatter
+ and an easier initialization function.
+
+ * `optik_ext`, defines some new option types (regexp, csv, color, date, etc.)
+ for `optik` / `optparse`
+
+
+ Modules extending some external modules
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ * `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive.
+
+ * `vcgutils` , utilities functions to generate file readable with Georg Sander's
+ vcg tool (Visualization of Compiler Graphs).
+
+
+ To be deprecated modules
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Those `logilab.common` modules will much probably be deprecated in future
+ versions:
+
+ * `testlib`: use `unittest2`_ instead
+ * `interface`: use `zope.interface`_ if you really want this
+ * `table`, `xmlutils`: is that used?
+ * `sphinxutils`: we won't go that way imo (i == syt)
+
+
+ Comments, support, bug reports
+ ------------------------------
+
+ Project page https://www.logilab.org/project/logilab-common
+
+ Use the python-projects@lists.logilab.org mailing list.
+
+ You can subscribe to this mailing list at
+ https://lists.logilab.org/mailman/listinfo/python-projects
+
+ Archives are available at
+ https://lists.logilab.org/pipermail/python-projects/
+
+
+ .. _Sphinx: http://sphinx.pocoo.org/
+ .. _`unittest2`: http://pypi.python.org/pypi/unittest2
+ .. _`discover`: http://pypi.python.org/pypi/discover
+ .. _`zope.interface`: http://pypi.python.org/pypi/zope.interface
+
+Platform: UNKNOWN
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst b/pymode/libs/logilab-common-1.4.1/README
similarity index 99%
rename from pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst
rename to pymode/libs/logilab-common-1.4.1/README
index 6b483af3..21cbe78d 100644
--- a/pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst
+++ b/pymode/libs/logilab-common-1.4.1/README
@@ -125,7 +125,6 @@ Those `logilab.common` modules will much probably be deprecated in future
versions:
* `testlib`: use `unittest2`_ instead
-* `pytest`: use `discover`_ instead
* `interface`: use `zope.interface`_ if you really want this
* `table`, `xmlutils`: is that used?
* `sphinxutils`: we won't go that way imo (i == syt)
@@ -149,5 +148,3 @@ https://lists.logilab.org/pipermail/python-projects/
.. _`unittest2`: http://pypi.python.org/pypi/unittest2
.. _`discover`: http://pypi.python.org/pypi/discover
.. _`zope.interface`: http://pypi.python.org/pypi/zope.interface
-
-
diff --git a/pymode/libs/logilab-common-1.4.1/__pkginfo__.py b/pymode/libs/logilab-common-1.4.1/__pkginfo__.py
new file mode 100644
index 00000000..b9f652fb
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/__pkginfo__.py
@@ -0,0 +1,61 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""logilab.common packaging information"""
+__docformat__ = "restructuredtext en"
+import sys
+import os
+
+distname = 'logilab-common'
+modname = 'common'
+subpackage_of = 'logilab'
+subpackage_master = True
+
+numversion = (1, 4, 1)
+version = '.'.join([str(num) for num in numversion])
+
+license = 'LGPL' # 2.1 or later
+description = "collection of low-level Python packages and modules used by Logilab projects"
+web = "http://www.logilab.org/project/%s" % distname
+mailinglist = "mailto://python-projects@lists.logilab.org"
+author = "Logilab"
+author_email = "contact@logilab.fr"
+
+
+from os.path import join
+scripts = [join('bin', 'logilab-pytest')]
+include_dirs = [join('test', 'data')]
+
+install_requires = [
+ 'setuptools',
+ 'six >= 1.4.0',
+]
+tests_require = [
+ 'pytz',
+ 'egenix-mx-base',
+]
+
+if sys.version_info < (2, 7):
+ install_requires.append('unittest2 >= 0.5.1')
+if os.name == 'nt':
+ install_requires.append('colorama')
+
+classifiers = ["Topic :: Utilities",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 3",
+ ]
diff --git a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest
new file mode 100755
index 00000000..42df3028
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+
+import warnings
+warnings.simplefilter('default', DeprecationWarning)
+
+from logilab.common.pytest import run
+run()
diff --git a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat
new file mode 100644
index 00000000..c664e882
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat
@@ -0,0 +1,17 @@
+@echo off
+rem = """-*-Python-*- script
+rem -------------------- DOS section --------------------
+rem You could set PYTHONPATH or TK environment variables here
+python -x "%~f0" %*
+goto exit
+
+"""
+# -------------------- Python section --------------------
+from logilab.common.pytest import run
+run()
+
+DosExitLabel = """
+:exit
+rem """
+
+
diff --git a/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1 b/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1
new file mode 100644
index 00000000..51aec2e9
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1
@@ -0,0 +1,54 @@
+.TH logilab-pytest "1" "January 2008" logilab-pytest
+.SH NAME
+.B logilab-pytest
+\- run python unit tests
+
+.SH SYNOPSIS
+usage: logilab-pytest [OPTIONS] [testfile [testpattern]]
+.PP
+examples:
+.PP
+logilab-pytest path/to/mytests.py
+logilab-pytest path/to/mytests.py TheseTests
+logilab-pytest path/to/mytests.py TheseTests.test_thisone
+.PP
+logilab-pytest one (will run both test_thisone and test_thatone)
+logilab-pytest path/to/mytests.py \fB\-s\fR not (will skip test_notthisone)
+.PP
+logilab-pytest \fB\-\-coverage\fR test_foo.py
+.IP
+(only if logilab.devtools is available)
+.SS "options:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+show this help message and exit
+.TP
+\fB\-t\fR TESTDIR
+directory where the tests will be found
+.TP
+\fB\-d\fR
+enable design\-by\-contract
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+Verbose output
+.TP
+\fB\-i\fR, \fB\-\-pdb\fR
+Enable test failure inspection (conflicts with
+\fB\-\-coverage\fR)
+.TP
+\fB\-x\fR, \fB\-\-exitfirst\fR
+Exit on first failure (only make sense when logilab-pytest run
+one test file)
+.TP
+\fB\-s\fR SKIPPED, \fB\-\-skip\fR=\fISKIPPED\fR
+test names matching this name will be skipped to skip
+several patterns, use commas
+.TP
+\fB\-q\fR, \fB\-\-quiet\fR
+Minimal output
+.TP
+\fB\-P\fR PROFILE, \fB\-\-profile\fR=\fIPROFILE\fR
+Profile execution and store data in the given file
+.TP
+\fB\-\-coverage\fR
+run tests with pycoverage (conflicts with \fB\-\-pdb\fR)
diff --git a/pymode/libs/logilab-common-1.4.1/doc/makefile b/pymode/libs/logilab-common-1.4.1/doc/makefile
new file mode 100644
index 00000000..02f5d544
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/doc/makefile
@@ -0,0 +1,8 @@
+all: epydoc
+
+epydoc:
+ mkdir -p apidoc
+ -epydoc --parse-only -o apidoc --html -v --no-private --exclude='test' --exclude="__pkginfo__" --exclude="setup" -n "Logilab's common library" $(shell dirname $(CURDIR))/build/lib/logilab/common >/dev/null
+
+clean:
+ rm -rf apidoc
diff --git a/pymode/libs/logilab-common-1.4.1/logilab/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/__init__.py
new file mode 100644
index 00000000..de40ea7c
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/logilab/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/pymode/libs/logilab/common/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py
similarity index 98%
rename from pymode/libs/logilab/common/__init__.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py
index fc01e4df..796831a7 100644
--- a/pymode/libs/logilab/common/__init__.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py
@@ -38,7 +38,7 @@
__pkginfo__.version = __version__
sys.modules['logilab.common.__pkginfo__'] = __pkginfo__
-STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
+STD_BLACKLIST = ('CVS', '.svn', '.hg', '.git', '.tox', 'debian', 'dist', 'build')
IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig')
diff --git a/pymode/libs/logilab/common/cache.py b/pymode/libs/logilab-common-1.4.1/logilab/common/cache.py
similarity index 100%
rename from pymode/libs/logilab/common/cache.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/cache.py
diff --git a/pymode/libs/logilab/common/changelog.py b/pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py
similarity index 82%
rename from pymode/libs/logilab/common/changelog.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py
index 2fff2ed6..3f62bd4c 100644
--- a/pymode/libs/logilab/common/changelog.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py
@@ -3,18 +3,18 @@
#
# This file is part of logilab-common.
#
-# logilab-common is free software: you can redistribute it and/or modify it under
+# logilab-common is free software: you can redistribute it or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option) any
-# later version.
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License
+# along with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Manipulation of upstream change log files.
The upstream change log files format handled is simpler than the one
@@ -48,6 +48,7 @@
import sys
from stat import S_IWRITE
+import codecs
from six import string_types
@@ -55,19 +56,22 @@
SUBBULLET = '-'
INDENT = ' ' * 4
+
class NoEntry(Exception):
"""raised when we are unable to find an entry"""
+
class EntryNotFound(Exception):
"""raised when we are unable to find a given entry"""
+
class Version(tuple):
"""simple class to handle soft version number has a tuple while
correctly printing it as X.Y.Z
"""
def __new__(cls, versionstr):
if isinstance(versionstr, string_types):
- versionstr = versionstr.strip(' :') # XXX (syt) duh?
+ versionstr = versionstr.strip(' :') # XXX (syt) duh?
parsed = cls.parse(versionstr)
else:
parsed = versionstr
@@ -79,11 +83,13 @@ def parse(cls, versionstr):
try:
return [int(i) for i in versionstr.split('.')]
except ValueError as ex:
- raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex))
+ raise ValueError("invalid literal for version '%s' (%s)" %
+ (versionstr, ex))
def __str__(self):
return '.'.join([str(i) for i in self])
+
# upstream change log #########################################################
class ChangeLogEntry(object):
@@ -109,44 +115,50 @@ def complete_latest_message(self, msg_suite):
"""complete the latest added message
"""
if not self.messages:
- raise ValueError('unable to complete last message as there is no previous message)')
- if self.messages[-1][1]: # sub messages
+ raise ValueError('unable to complete last message as '
+ 'there is no previous message)')
+ if self.messages[-1][1]: # sub messages
self.messages[-1][1][-1].append(msg_suite)
- else: # message
+ else: # message
self.messages[-1][0].append(msg_suite)
def add_sub_message(self, sub_msg, key=None):
if not self.messages:
- raise ValueError('unable to complete last message as there is no previous message)')
+ raise ValueError('unable to complete last message as '
+ 'there is no previous message)')
if key is None:
self.messages[-1][1].append([sub_msg])
else:
- raise NotImplementedError("sub message to specific key are not implemented yet")
+ raise NotImplementedError('sub message to specific key '
+ 'are not implemented yet')
def write(self, stream=sys.stdout):
"""write the entry to file """
- stream.write('%s -- %s\n' % (self.date or '', self.version or ''))
+ stream.write(u'%s -- %s\n' % (self.date or '', self.version or ''))
for msg, sub_msgs in self.messages:
- stream.write('%s%s %s\n' % (INDENT, BULLET, msg[0]))
- stream.write(''.join(msg[1:]))
+ stream.write(u'%s%s %s\n' % (INDENT, BULLET, msg[0]))
+ stream.write(u''.join(msg[1:]))
if sub_msgs:
- stream.write('\n')
+ stream.write(u'\n')
for sub_msg in sub_msgs:
- stream.write('%s%s %s\n' % (INDENT * 2, SUBBULLET, sub_msg[0]))
- stream.write(''.join(sub_msg[1:]))
- stream.write('\n')
+ stream.write(u'%s%s %s\n' %
+ (INDENT * 2, SUBBULLET, sub_msg[0]))
+ stream.write(u''.join(sub_msg[1:]))
+ stream.write(u'\n')
+
+ stream.write(u'\n\n')
- stream.write('\n\n')
class ChangeLog(object):
"""object representation of a whole ChangeLog file"""
entry_class = ChangeLogEntry
- def __init__(self, changelog_file, title=''):
+ def __init__(self, changelog_file, title=u''):
self.file = changelog_file
+ assert isinstance(title, type(u'')), 'title must be a unicode object'
self.title = title
- self.additional_content = ''
+ self.additional_content = u''
self.entries = []
self.load()
@@ -184,12 +196,12 @@ def add(self, msg, create=None):
def load(self):
""" read a logilab's ChangeLog from file """
try:
- stream = open(self.file)
+ stream = codecs.open(self.file, encoding='utf-8')
except IOError:
return
last = None
expect_sub = False
- for line in stream.readlines():
+ for line in stream:
sline = line.strip()
words = sline.split()
# if new entry
@@ -221,18 +233,17 @@ def load(self):
stream.close()
def format_title(self):
- return '%s\n\n' % self.title.strip()
+ return u'%s\n\n' % self.title.strip()
def save(self):
"""write back change log"""
# filetutils isn't importable in appengine, so import locally
from logilab.common.fileutils import ensure_fs_mode
ensure_fs_mode(self.file, S_IWRITE)
- self.write(open(self.file, 'w'))
+ self.write(codecs.open(self.file, 'w', encoding='utf-8'))
def write(self, stream=sys.stdout):
"""write changelog to stream"""
stream.write(self.format_title())
for entry in self.entries:
entry.write(stream)
-
diff --git a/pymode/libs/logilab/common/clcommands.py b/pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py
similarity index 100%
rename from pymode/libs/logilab/common/clcommands.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py
diff --git a/pymode/libs/logilab/common/compat.py b/pymode/libs/logilab-common-1.4.1/logilab/common/compat.py
similarity index 100%
rename from pymode/libs/logilab/common/compat.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/compat.py
diff --git a/pymode/libs/logilab/common/configuration.py b/pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py
similarity index 98%
rename from pymode/libs/logilab/common/configuration.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py
index b2924277..7a54f1af 100644
--- a/pymode/libs/logilab/common/configuration.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py
@@ -122,7 +122,7 @@
from copy import copy
from warnings import warn
-from six import string_types
+from six import integer_types, string_types
from six.moves import range, configparser as cp, input
from logilab.common.compat import str_encode as _encode
@@ -372,7 +372,7 @@ def format_option_value(optdict, value):
value = value and 'yes' or 'no'
elif isinstance(value, string_types) and value.isspace():
value = "'%s'" % value
- elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)):
+ elif optdict.get('type') == 'time' and isinstance(value, (float, ) + integer_types):
value = format_time(value)
elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'):
value = format_bytes(value)
@@ -401,6 +401,9 @@ def ini_format(stream, options, encoding):
print('#%s=' % optname, file=stream)
else:
value = _encode(value, encoding).strip()
+ if optdict.get('type') == 'string' and '\n' in value:
+ prefix = '\n '
+ value = prefix + prefix.join(value.split('\n'))
print('%s=%s' % (optname, value), file=stream)
format_section = ini_format_section
@@ -635,7 +638,7 @@ def helpfunc(option, opt, val, p, level=helplevel):
parser = self.cfgfile_parser
parser.read([config_file])
# normalize sections'title
- for sect, values in parser._sections.items():
+ for sect, values in list(parser._sections.items()):
if not sect.isupper() and values:
parser._sections[sect.upper()] = values
elif not self.quiet:
@@ -909,7 +912,7 @@ def options_by_section(self):
(optname, optdict, self.option_value(optname)))
if None in sections:
yield None, sections.pop(None)
- for section, options in sections.items():
+ for section, options in sorted(sections.items()):
yield section.upper(), options
def options_and_values(self, options=None):
@@ -946,15 +949,15 @@ def register_options(self, options):
options_by_group = {}
for optname, optdict in options:
options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict))
- for group, options in options_by_group.items():
- self.add_option_group(group, None, options, self)
+ for group, group_options in options_by_group.items():
+ self.add_option_group(group, None, group_options, self)
self.options += tuple(options)
def load_defaults(self):
OptionsProviderMixIn.load_defaults(self)
def __iter__(self):
- return iter(self.config.__dict__.iteritems())
+ return iter(self.config.__dict__.items())
def __getitem__(self, key):
try:
@@ -1042,7 +1045,7 @@ def read_old_config(newconfig, changes, configfile):
option, oldtype, newvalue = action[1:]
changesindex.setdefault(option, []).append((action[0], oldtype, newvalue))
continue
- if action[1] in ('added', 'removed'):
+ if action[0] in ('added', 'removed'):
continue # nothing to do here
raise Exception('unknown change %s' % action[0])
# build a config object able to read the old config
diff --git a/pymode/libs/logilab/common/daemon.py b/pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py
similarity index 100%
rename from pymode/libs/logilab/common/daemon.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py
diff --git a/pymode/libs/logilab/common/date.py b/pymode/libs/logilab-common-1.4.1/logilab/common/date.py
similarity index 99%
rename from pymode/libs/logilab/common/date.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/date.py
index a093a8a9..1d13a770 100644
--- a/pymode/libs/logilab/common/date.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/date.py
@@ -237,7 +237,7 @@ def todatetime(somedate):
return datetime(somedate.year, somedate.month, somedate.day)
def datetime2ticks(somedate):
- return timegm(somedate.timetuple()) * 1000
+ return timegm(somedate.timetuple()) * 1000 + int(getattr(somedate, 'microsecond', 0) / 1000)
def ticks2datetime(ticks):
miliseconds, microseconds = divmod(ticks, 1000)
diff --git a/pymode/libs/logilab/common/debugger.py b/pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py
similarity index 100%
rename from pymode/libs/logilab/common/debugger.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py
diff --git a/pymode/libs/logilab/common/decorators.py b/pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py
similarity index 100%
rename from pymode/libs/logilab/common/decorators.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py
diff --git a/pymode/libs/logilab/common/deprecation.py b/pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py
similarity index 100%
rename from pymode/libs/logilab/common/deprecation.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py
diff --git a/pymode/libs/logilab/common/fileutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py
similarity index 97%
rename from pymode/libs/logilab/common/fileutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py
index b30cf5f8..93439d3b 100644
--- a/pymode/libs/logilab/common/fileutils.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py
@@ -28,6 +28,7 @@
__docformat__ = "restructuredtext en"
+import io
import sys
import shutil
import mimetypes
@@ -67,13 +68,7 @@ def first_level_directory(path):
return head
def abspath_listdir(path):
- """Lists path's content using absolute paths.
-
- >>> os.listdir('/home')
- ['adim', 'alf', 'arthur', 'auc']
- >>> abspath_listdir('/home')
- ['/home/adim', '/home/alf', '/home/arthur', '/home/auc']
- """
+ """Lists path's content using absolute paths."""
path = abspath(path)
return [join(path, filename) for filename in listdir(path)]
@@ -288,10 +283,8 @@ def lines(path, comments=None):
:warning: at some point this function will probably return an iterator
"""
- stream = open(path, 'U')
- result = stream_lines(stream, comments)
- stream.close()
- return result
+ with io.open(path) as stream:
+ return stream_lines(stream, comments)
def stream_lines(stream, comments=None):
diff --git a/pymode/libs/logilab/common/graph.py b/pymode/libs/logilab-common-1.4.1/logilab/common/graph.py
similarity index 100%
rename from pymode/libs/logilab/common/graph.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/graph.py
diff --git a/pymode/libs/logilab/common/interface.py b/pymode/libs/logilab-common-1.4.1/logilab/common/interface.py
similarity index 100%
rename from pymode/libs/logilab/common/interface.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/interface.py
diff --git a/pymode/libs/logilab/common/logging_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py
similarity index 100%
rename from pymode/libs/logilab/common/logging_ext.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py
diff --git a/pymode/libs/logilab/common/modutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py
similarity index 93%
rename from pymode/libs/logilab/common/modutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py
index dd725d24..030cfa3b 100644
--- a/pymode/libs/logilab/common/modutils.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py
@@ -32,12 +32,14 @@
import sys
import os
-from os.path import splitext, join, abspath, isdir, dirname, exists, basename
+from os.path import (splitext, join, abspath, isdir, dirname, exists,
+ basename, expanduser, normcase, realpath)
from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY
from distutils.sysconfig import get_config_var, get_python_lib, get_python_version
from distutils.errors import DistutilsPlatformError
-from six.moves import range
+from six import PY3
+from six.moves import map, range
try:
import zipimport
@@ -47,6 +49,7 @@
ZIPFILE = object()
from logilab.common import STD_BLACKLIST, _handle_blacklist
+from logilab.common.deprecation import deprecated
# Notes about STD_LIB_DIR
# Consider arch-specific installation for STD_LIB_DIR definition
@@ -165,7 +168,11 @@ def load_module_from_modpath(parts, path=None, use_sys=True):
module = sys.modules.get(curname)
if module is None:
mp_file, mp_filename, mp_desc = find_module(part, path)
- module = load_module(curname, mp_file, mp_filename, mp_desc)
+ try:
+ module = load_module(curname, mp_file, mp_filename, mp_desc)
+ finally:
+ if mp_file is not None:
+ mp_file.close()
if prevmodule:
setattr(prevmodule, part, module)
_file = getattr(module, '__file__', '')
@@ -215,8 +222,24 @@ def _check_init(path, mod_path):
return True
+def _canonicalize_path(path):
+ return realpath(expanduser(path))
+
+
+def _path_from_filename(filename):
+ if PY3:
+ return filename
+ else:
+ if filename.endswith(".pyc"):
+ return filename[:-1]
+ return filename
+
+
+@deprecated('you should avoid using modpath_from_file()')
def modpath_from_file(filename, extrapath=None):
- """given a file path return the corresponding splitted module's name
+ """DEPRECATED: doens't play well with symlinks and sys.meta_path
+
+ Given a file path return the corresponding splitted module's name
(i.e name of a module or package splitted on '.')
:type filename: str
@@ -235,26 +258,29 @@ def modpath_from_file(filename, extrapath=None):
:rtype: list(str)
:return: the corresponding splitted module's name
"""
- base = splitext(abspath(filename))[0]
+ filename = _path_from_filename(filename)
+ filename = _canonicalize_path(filename)
+ base = os.path.splitext(filename)[0]
+
if extrapath is not None:
- for path_ in extrapath:
+ for path_ in map(_canonicalize_path, extrapath):
path = abspath(path_)
- if path and base[:len(path)] == path:
+ if path and normcase(base[:len(path)]) == normcase(path):
submodpath = [pkg for pkg in base[len(path):].split(os.sep)
if pkg]
if _check_init(path, submodpath[:-1]):
return extrapath[path_].split('.') + submodpath
- for path in sys.path:
- path = abspath(path)
- if path and base.startswith(path):
+
+ for path in map(_canonicalize_path, sys.path):
+ if path and normcase(base).startswith(path):
modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
if _check_init(path, modpath[:-1]):
return modpath
+
raise ImportError('Unable to find module for %s in %s' % (
filename, ', \n'.join(sys.path)))
-
def file_from_modpath(modpath, path=None, context_file=None):
"""given a mod path (i.e. splitted module / package name), return the
corresponding file, giving priority to source file over precompiled
@@ -471,6 +497,18 @@ def cleanup_sys_modules(directories):
return cleaned
+def clean_sys_modules(names):
+ """remove submodules starting with name from `names` from `sys.modules`"""
+ cleaned = set()
+ for modname in list(sys.modules):
+ for name in names:
+ if modname.startswith(name):
+ del sys.modules[modname]
+ cleaned.add(modname)
+ break
+ return cleaned
+
+
def is_python_source(filename):
"""
rtype: bool
@@ -632,7 +670,9 @@ def _module_file(modpath, path=None):
# setuptools has added into sys.modules a module object with proper
# __path__, get back information from there
module = sys.modules[modpath.pop(0)]
- path = module.__path__
+ # use list() to protect against _NamespacePath instance we get with python 3, which
+ # find_module later doesn't like
+ path = list(module.__path__)
if not modpath:
return C_BUILTIN, None
imported = []
diff --git a/pymode/libs/logilab/common/optik_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py
similarity index 98%
rename from pymode/libs/logilab/common/optik_ext.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py
index 1fd2a7f8..95489c28 100644
--- a/pymode/libs/logilab/common/optik_ext.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py
@@ -56,6 +56,8 @@
from copy import copy
from os.path import exists
+from six import integer_types
+
# python >= 2.3
from optparse import OptionParser as BaseParser, Option as BaseOption, \
OptionGroup, OptionContainer, OptionValueError, OptionError, \
@@ -169,14 +171,14 @@ def check_color(option, opt, value):
raise OptionValueError(msg % (opt, value))
def check_time(option, opt, value):
- if isinstance(value, (int, long, float)):
+ if isinstance(value, integer_types + (float,)):
return value
return apply_units(value, TIME_UNITS)
def check_bytes(option, opt, value):
if hasattr(value, '__int__'):
return value
- return apply_units(value, BYTE_UNITS)
+ return apply_units(value, BYTE_UNITS, final=int)
class Option(BaseOption):
diff --git a/pymode/libs/logilab/common/optparser.py b/pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py
similarity index 100%
rename from pymode/libs/logilab/common/optparser.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py
diff --git a/pymode/libs/logilab/common/proc.py b/pymode/libs/logilab-common-1.4.1/logilab/common/proc.py
similarity index 100%
rename from pymode/libs/logilab/common/proc.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/proc.py
diff --git a/pymode/libs/logilab/common/pytest.py b/pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py
similarity index 85%
rename from pymode/libs/logilab/common/pytest.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py
index 3d8aca34..c644a61f 100644
--- a/pymode/libs/logilab/common/pytest.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py
@@ -15,14 +15,14 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see .
-"""pytest is a tool that eases test running and debugging.
+"""logilab-pytest is a tool that eases test running and debugging.
-To be able to use pytest, you should either write tests using
+To be able to use logilab-pytest, you should either write tests using
the logilab.common.testlib's framework or the unittest module of the
Python's standard library.
-You can customize pytest's behaviour by defining a ``pytestconf.py`` file
-somewhere in your test directory. In this file, you can add options or
+You can customize logilab-pytest's behaviour by defining a ``pytestconf.py``
+file somewhere in your test directory. In this file, you can add options or
change the way tests are run.
To add command line options, you must define a ``update_parser`` function in
@@ -31,8 +31,8 @@
If you wish to customize the tester, you'll have to define a class named
``CustomPyTester``. This class should extend the default `PyTester` class
-defined in the pytest module. Take a look at the `PyTester` and `DjangoTester`
-classes for more information about what can be done.
+defined in the logilab.common.pytest module. Take a look at the `PyTester` and
+`DjangoTester` classes for more information about what can be done.
For instance, if you wish to add a custom -l option to specify a loglevel, you
could define the following ``pytestconf.py`` file ::
@@ -101,13 +101,13 @@ def titi(test):
examples:
-pytest path/to/mytests.py
-pytest path/to/mytests.py TheseTests
-pytest path/to/mytests.py TheseTests.test_thisone
-pytest path/to/mytests.py -m '(not long and database) or regr'
+logilab-pytest path/to/mytests.py
+logilab-pytest path/to/mytests.py TheseTests
+logilab-pytest path/to/mytests.py TheseTests.test_thisone
+logilab-pytest path/to/mytests.py -m '(not long and database) or regr'
-pytest one (will run both test_thisone and test_thatone)
-pytest path/to/mytests.py -s not (will skip test_notthisone)
+logilab-pytest one (will run both test_thisone and test_thatone)
+logilab-pytest path/to/mytests.py -s not (will skip test_notthisone)
"""
ENABLE_DBC = False
@@ -118,16 +118,20 @@ def titi(test):
from time import time, clock
import warnings
import types
+import inspect
+import traceback
from inspect import isgeneratorfunction, isclass
-from contextlib import contextmanager
from random import shuffle
+from itertools import dropwhile
+from logilab.common.deprecation import deprecated
from logilab.common.fileutils import abspath_listdir
from logilab.common import textutils
from logilab.common import testlib, STD_BLACKLIST
# use the same unittest module as testlib
from logilab.common.testlib import unittest, start_interactive_mode
-from logilab.common.deprecation import deprecated
+from logilab.common.testlib import nocoverage, pause_trace, replace_trace # bwcompat
+from logilab.common.debugger import Debugger, colorize_source
import doctest
import unittest as unittest_legacy
@@ -148,62 +152,6 @@ def titi(test):
CONF_FILE = 'pytestconf.py'
-## coverage pausing tools
-
-@contextmanager
-def replace_trace(trace=None):
- """A context manager that temporary replaces the trace function"""
- oldtrace = sys.gettrace()
- sys.settrace(trace)
- try:
- yield
- finally:
- # specific hack to work around a bug in pycoverage, see
- # https://bitbucket.org/ned/coveragepy/issue/123
- if (oldtrace is not None and not callable(oldtrace) and
- hasattr(oldtrace, 'pytrace')):
- oldtrace = oldtrace.pytrace
- sys.settrace(oldtrace)
-
-
-def pause_trace():
- """A context manager that temporary pauses any tracing"""
- return replace_trace()
-
-class TraceController(object):
- ctx_stack = []
-
- @classmethod
- @deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
- def pause_tracing(cls):
- cls.ctx_stack.append(pause_trace())
- cls.ctx_stack[-1].__enter__()
-
- @classmethod
- @deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
- def resume_tracing(cls):
- cls.ctx_stack.pop().__exit__(None, None, None)
-
-
-pause_tracing = TraceController.pause_tracing
-resume_tracing = TraceController.resume_tracing
-
-
-def nocoverage(func):
- """Function decorator that pauses tracing functions"""
- if hasattr(func, 'uncovered'):
- return func
- func.uncovered = True
-
- def not_covered(*args, **kwargs):
- with pause_trace():
- return func(*args, **kwargs)
- not_covered.uncovered = True
- return not_covered
-
-## end of coverage pausing tools
-
-
TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$")
def this_is_a_testfile(filename):
"""returns True if `filename` seems to be a test file"""
@@ -611,12 +559,12 @@ def capture_and_rebuild(option, opt, value, parser):
warnings.simplefilter('ignore', DeprecationWarning)
rebuild_cmdline(option, opt, value, parser)
- # pytest options
+ # logilab-pytest options
parser.add_option('-t', dest='testdir', default=None,
help="directory where the tests will be found")
parser.add_option('-d', dest='dbc', default=False,
action="store_true", help="enable design-by-contract")
- # unittest_main options provided and passed through pytest
+ # unittest_main options provided and passed through logilab-pytest
parser.add_option('-v', '--verbose', callback=rebuild_cmdline,
action="callback", help="Verbose output")
parser.add_option('-i', '--pdb', callback=rebuild_and_store,
@@ -625,7 +573,7 @@ def capture_and_rebuild(option, opt, value, parser):
parser.add_option('-x', '--exitfirst', callback=rebuild_and_store,
dest="exitfirst", default=False,
action="callback", help="Exit on first failure "
- "(only make sense when pytest run one test file)")
+ "(only make sense when logilab-pytest run one test file)")
parser.add_option('-R', '--restart', callback=rebuild_and_store,
dest="restart", default=False,
action="callback",
@@ -651,7 +599,7 @@ def capture_and_rebuild(option, opt, value, parser):
if DJANGO_FOUND:
parser.add_option('-J', '--django', dest='django', default=False,
action="store_true",
- help='use pytest for django test cases')
+ help='use logilab-pytest for django test cases')
return parser
@@ -684,6 +632,7 @@ def parseargs(parser):
+@deprecated('[logilab-common 1.3] logilab-pytest is deprecated, use another test runner')
def run():
parser = make_parser()
rootdir, testercls = project_root(parser)
@@ -938,7 +887,7 @@ def does_match_tags(self, test):
return True # no pattern
def _makeResult(self):
- return testlib.SkipAwareTestResult(self.stream, self.descriptions,
+ return SkipAwareTestResult(self.stream, self.descriptions,
self.verbosity, self.exitfirst,
self.pdbmode, self.cvg, self.colorize)
@@ -983,6 +932,155 @@ def run(self, test):
self.stream.writeln("")
return result
+
+class SkipAwareTestResult(unittest._TextTestResult):
+
+ def __init__(self, stream, descriptions, verbosity,
+ exitfirst=False, pdbmode=False, cvg=None, colorize=False):
+ super(SkipAwareTestResult, self).__init__(stream,
+ descriptions, verbosity)
+ self.skipped = []
+ self.debuggers = []
+ self.fail_descrs = []
+ self.error_descrs = []
+ self.exitfirst = exitfirst
+ self.pdbmode = pdbmode
+ self.cvg = cvg
+ self.colorize = colorize
+ self.pdbclass = Debugger
+ self.verbose = verbosity > 1
+
+ def descrs_for(self, flavour):
+ return getattr(self, '%s_descrs' % flavour.lower())
+
+ def _create_pdb(self, test_descr, flavour):
+ self.descrs_for(flavour).append( (len(self.debuggers), test_descr) )
+ if self.pdbmode:
+ self.debuggers.append(self.pdbclass(sys.exc_info()[2]))
+
+ def _iter_valid_frames(self, frames):
+ """only consider non-testlib frames when formatting traceback"""
+ lgc_testlib = osp.abspath(__file__)
+ std_testlib = osp.abspath(unittest.__file__)
+ invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib)
+ for frameinfo in dropwhile(invalid, frames):
+ yield frameinfo
+
+ def _exc_info_to_string(self, err, test):
+ """Converts a sys.exc_info()-style tuple of values into a string.
+
+ This method is overridden here because we want to colorize
+ lines if --color is passed, and display local variables if
+ --verbose is passed
+ """
+ exctype, exc, tb = err
+ output = ['Traceback (most recent call last)']
+ frames = inspect.getinnerframes(tb)
+ colorize = self.colorize
+ frames = enumerate(self._iter_valid_frames(frames))
+ for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames:
+ filename = osp.abspath(filename)
+ if ctx is None: # pyc files or C extensions for instance
+ source = ''
+ else:
+ source = ''.join(ctx)
+ if colorize:
+ filename = textutils.colorize_ansi(filename, 'magenta')
+ source = colorize_source(source)
+ output.append(' File "%s", line %s, in %s' % (filename, lineno, funcname))
+ output.append(' %s' % source.strip())
+ if self.verbose:
+ output.append('%r == %r' % (dir(frame), test.__module__))
+ output.append('')
+ output.append(' ' + ' local variables '.center(66, '-'))
+ for varname, value in sorted(frame.f_locals.items()):
+ output.append(' %s: %r' % (varname, value))
+ if varname == 'self': # special handy processing for self
+ for varname, value in sorted(vars(value).items()):
+ output.append(' self.%s: %r' % (varname, value))
+ output.append(' ' + '-' * 66)
+ output.append('')
+ output.append(''.join(traceback.format_exception_only(exctype, exc)))
+ return '\n'.join(output)
+
+ def addError(self, test, err):
+ """err -> (exc_type, exc, tcbk)"""
+ exc_type, exc, _ = err
+ if isinstance(exc, testlib.SkipTest):
+ assert exc_type == SkipTest
+ self.addSkip(test, exc)
+ else:
+ if self.exitfirst:
+ self.shouldStop = True
+ descr = self.getDescription(test)
+ super(SkipAwareTestResult, self).addError(test, err)
+ self._create_pdb(descr, 'error')
+
+ def addFailure(self, test, err):
+ if self.exitfirst:
+ self.shouldStop = True
+ descr = self.getDescription(test)
+ super(SkipAwareTestResult, self).addFailure(test, err)
+ self._create_pdb(descr, 'fail')
+
+ def addSkip(self, test, reason):
+ self.skipped.append((test, reason))
+ if self.showAll:
+ self.stream.writeln("SKIPPED")
+ elif self.dots:
+ self.stream.write('S')
+
+ def printErrors(self):
+ super(SkipAwareTestResult, self).printErrors()
+ self.printSkippedList()
+
+ def printSkippedList(self):
+ # format (test, err) compatible with unittest2
+ for test, err in self.skipped:
+ descr = self.getDescription(test)
+ self.stream.writeln(self.separator1)
+ self.stream.writeln("%s: %s" % ('SKIPPED', descr))
+ self.stream.writeln("\t%s" % err)
+
+ def printErrorList(self, flavour, errors):
+ for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors):
+ self.stream.writeln(self.separator1)
+ self.stream.writeln("%s: %s" % (flavour, descr))
+ self.stream.writeln(self.separator2)
+ self.stream.writeln(err)
+ self.stream.writeln('no stdout'.center(len(self.separator2)))
+ self.stream.writeln('no stderr'.center(len(self.separator2)))
+
+
+from .decorators import monkeypatch
+orig_call = testlib.TestCase.__call__
+@monkeypatch(testlib.TestCase, '__call__')
+def call(self, result=None, runcondition=None, options=None):
+ orig_call(self, result=result, runcondition=runcondition, options=options)
+ if hasattr(options, "exitfirst") and options.exitfirst:
+ # add this test to restart file
+ try:
+ restartfile = open(FILE_RESTART, 'a')
+ try:
+ descr = '.'.join((self.__class__.__module__,
+ self.__class__.__name__,
+ self._testMethodName))
+ restartfile.write(descr+os.linesep)
+ finally:
+ restartfile.close()
+ except Exception:
+ print("Error while saving succeeded test into",
+ osp.join(os.getcwd(), FILE_RESTART),
+ file=sys.__stderr__)
+ raise
+
+
+@monkeypatch(testlib.TestCase)
+def defaultTestResult(self):
+ """return a new instance of the defaultTestResult"""
+ return SkipAwareTestResult()
+
+
class NonStrictTestLoader(unittest.TestLoader):
"""
Overrides default testloader to be able to omit classname when
@@ -1186,7 +1284,7 @@ def enable_dbc(*args):
# monkeypatch unittest and doctest (ouch !)
-unittest._TextTestResult = testlib.SkipAwareTestResult
+unittest._TextTestResult = SkipAwareTestResult
unittest.TextTestRunner = SkipAwareTextTestRunner
unittest.TestLoader = NonStrictTestLoader
unittest.TestProgram = SkipAwareTestProgram
@@ -1200,3 +1298,7 @@ def enable_dbc(*args):
unittest.FunctionTestCase.__bases__ = (testlib.TestCase,)
unittest.TestSuite.run = _ts_run
unittest.TestSuite._wrapped_run = _ts_wrapped_run
+
+if __name__ == '__main__':
+ run()
+
diff --git a/pymode/libs/logilab/common/registry.py b/pymode/libs/logilab-common-1.4.1/logilab/common/registry.py
similarity index 95%
rename from pymode/libs/logilab/common/registry.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/registry.py
index 86a85f94..07d43532 100644
--- a/pymode/libs/logilab/common/registry.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/registry.py
@@ -58,8 +58,11 @@
Predicates
----------
.. autoclass:: Predicate
-.. autofunc:: objectify_predicate
+.. autofunction:: objectify_predicate
.. autoclass:: yes
+.. autoclass:: AndPredicate
+.. autoclass:: OrPredicate
+.. autoclass:: NotPredicate
Debugging
---------
@@ -78,6 +81,7 @@
__docformat__ = "restructuredtext en"
import sys
+import pkgutil
import types
import weakref
import traceback as tb
@@ -91,6 +95,7 @@
from logilab.common.modutils import modpath_from_file
from logilab.common.logging_ext import set_log_methods
from logilab.common.decorators import classproperty
+from logilab.common.deprecation import deprecated
class RegistryException(Exception):
@@ -207,12 +212,22 @@ def __new__(cls, *args, **kwargs):
"""Add a __module__ attribute telling the module where the instance was
created, for automatic registration.
"""
+ module = kwargs.pop('__module__', None)
obj = super(RegistrableInstance, cls).__new__(cls)
- # XXX subclass must no override __new__
- filepath = tb.extract_stack(limit=2)[0][0]
- obj.__module__ = _modname_from_path(filepath)
+ if module is None:
+ warn('instantiate {0} with '
+ '__module__=__name__'.format(cls.__name__),
+ DeprecationWarning)
+ # XXX subclass must no override __new__
+ filepath = tb.extract_stack(limit=2)[0][0]
+ obj.__module__ = _modname_from_path(filepath)
+ else:
+ obj.__module__ = module
return obj
+ def __init__(self, __module__=None):
+ super(RegistrableInstance, self).__init__()
+
class Registry(dict):
"""The registry store a set of implementations associated to identifier:
@@ -237,15 +252,15 @@ class Registry(dict):
Registration methods:
- .. automethod: register
- .. automethod: unregister
+ .. automethod:: register
+ .. automethod:: unregister
Selection methods:
- .. automethod: select
- .. automethod: select_or_none
- .. automethod: possible_objects
- .. automethod: object_by_id
+ .. automethod:: select
+ .. automethod:: select_or_none
+ .. automethod:: possible_objects
+ .. automethod:: object_by_id
"""
def __init__(self, debugmode):
super(Registry, self).__init__()
@@ -483,11 +498,10 @@ def f(self, arg1):
Controlling object registration
-------------------------------
- Dynamic loading is triggered by calling the
- :meth:`register_objects` method, given a list of directories to
- inspect for python modules.
+ Dynamic loading is triggered by calling the :meth:`register_modnames`
+ method, given a list of modules names to inspect.
- .. automethod: register_objects
+ .. automethod:: register_modnames
For each module, by default, all compatible objects are registered
automatically. However if some objects come as replacement of
@@ -672,6 +686,7 @@ def init_registration(self, path, extrapath=None):
self._loadedmods = {}
return filemods
+ @deprecated('use register_modnames() instead')
def register_objects(self, path, extrapath=None):
"""register all objects found walking down """
# load views from each directory in the instance's path
@@ -681,6 +696,23 @@ def register_objects(self, path, extrapath=None):
self.load_file(filepath, modname)
self.initialization_completed()
+ def register_modnames(self, modnames):
+ """register all objects found in """
+ self.reset()
+ self._loadedmods = {}
+ self._toloadmods = {}
+ toload = []
+ for modname in modnames:
+ filepath = pkgutil.find_loader(modname).get_filename()
+ if filepath[-4:] in ('.pyc', '.pyo'):
+ # The source file *must* exists
+ filepath = filepath[:-1]
+ self._toloadmods[modname] = filepath
+ toload.append((filepath, modname))
+ for filepath, modname in toload:
+ self.load_file(filepath, modname)
+ self.initialization_completed()
+
def initialization_completed(self):
"""call initialization_completed() on all known registries"""
for reg in self.values():
@@ -720,7 +752,6 @@ def is_reload_needed(self, path):
def load_file(self, filepath, modname):
""" load registrable objects (if any) from a python file """
- from logilab.common.modutils import load_module_from_name
if modname in self._loadedmods:
return
self._loadedmods[modname] = {}
@@ -735,7 +766,9 @@ def load_file(self, filepath, modname):
# module
self._lastmodifs[filepath] = mdate
# load the module
- module = load_module_from_name(modname)
+ if sys.version_info < (3,) and not isinstance(modname, str):
+ modname = str(modname)
+ module = __import__(modname, fromlist=modname.split('.')[:-1])
self.load_module(module)
def load_module(self, module):
@@ -1113,8 +1146,6 @@ def __call__(self, *args, **kwargs):
# deprecated stuff #############################################################
-from logilab.common.deprecation import deprecated
-
@deprecated('[lgc 0.59] use Registry.objid class method instead')
def classid(cls):
return '%s.%s' % (cls.__module__, cls.__name__)
diff --git a/pymode/libs/logilab/common/shellutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py
similarity index 85%
rename from pymode/libs/logilab/common/shellutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py
index 4e689560..b9d5fa6d 100644
--- a/pymode/libs/logilab/common/shellutils.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py
@@ -44,15 +44,6 @@
from logilab.common.compat import str_to_bytes
from logilab.common.deprecation import deprecated
-try:
- from logilab.common.proc import ProcInfo, NoSuchProcess
-except ImportError:
- # windows platform
- class NoSuchProcess(Exception): pass
-
- def ProcInfo(pid):
- raise NoSuchProcess()
-
class tempdir(object):
@@ -245,53 +236,6 @@ def __init__(self, command):
Execute = deprecated('Use subprocess.Popen instead')(Execute)
-def acquire_lock(lock_file, max_try=10, delay=10, max_delay=3600):
- """Acquire a lock represented by a file on the file system
-
- If the process written in lock file doesn't exist anymore, we remove the
- lock file immediately
- If age of the lock_file is greater than max_delay, then we raise a UserWarning
- """
- count = abs(max_try)
- while count:
- try:
- fd = os.open(lock_file, os.O_EXCL | os.O_RDWR | os.O_CREAT)
- os.write(fd, str_to_bytes(str(os.getpid())) )
- os.close(fd)
- return True
- except OSError as e:
- if e.errno == errno.EEXIST:
- try:
- fd = open(lock_file, "r")
- pid = int(fd.readline())
- pi = ProcInfo(pid)
- age = (time.time() - os.stat(lock_file)[stat.ST_MTIME])
- if age / max_delay > 1 :
- raise UserWarning("Command '%s' (pid %s) has locked the "
- "file '%s' for %s minutes"
- % (pi.name(), pid, lock_file, age/60))
- except UserWarning:
- raise
- except NoSuchProcess:
- os.remove(lock_file)
- except Exception:
- # The try block is not essential. can be skipped.
- # Note: ProcInfo object is only available for linux
- # process information are not accessible...
- # or lock_file is no more present...
- pass
- else:
- raise
- count -= 1
- time.sleep(delay)
- else:
- raise Exception('Unable to acquire %s' % lock_file)
-
-def release_lock(lock_file):
- """Release a lock represented by a file on the file system."""
- os.remove(lock_file)
-
-
class ProgressBar(object):
"""A simple text progression bar."""
@@ -360,7 +304,7 @@ def finish(self):
class DummyProgressBar(object):
- __slot__ = ('text',)
+ __slots__ = ('text',)
def refresh(self):
pass
diff --git a/pymode/libs/logilab/common/sphinx_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py
similarity index 100%
rename from pymode/libs/logilab/common/sphinx_ext.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py
diff --git a/pymode/libs/logilab/common/sphinxutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py
similarity index 100%
rename from pymode/libs/logilab/common/sphinxutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py
diff --git a/pymode/libs/logilab/common/table.py b/pymode/libs/logilab-common-1.4.1/logilab/common/table.py
similarity index 100%
rename from pymode/libs/logilab/common/table.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/table.py
diff --git a/pymode/libs/logilab/common/tasksqueue.py b/pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py
similarity index 100%
rename from pymode/libs/logilab/common/tasksqueue.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py
diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py b/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py
new file mode 100644
index 00000000..fa3e36ee
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py
@@ -0,0 +1,708 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""Run tests.
+
+This will find all modules whose name match a given prefix in the test
+directory, and run them. Various command line options provide
+additional facilities.
+
+Command line options:
+
+ -v verbose -- run tests in verbose mode with output to stdout
+ -q quiet -- don't print anything except if a test fails
+ -t testdir -- directory where the tests will be found
+ -x exclude -- add a test to exclude
+ -p profile -- profiled execution
+ -d dbc -- enable design-by-contract
+ -m match -- only run test matching the tag pattern which follow
+
+If no non-option arguments are present, prefixes used are 'test',
+'regrtest', 'smoketest' and 'unittest'.
+
+"""
+
+from __future__ import print_function
+
+__docformat__ = "restructuredtext en"
+# modified copy of some functions from test/regrtest.py from PyXml
+# disable camel case warning
+# pylint: disable=C0103
+
+from contextlib import contextmanager
+import sys
+import os, os.path as osp
+import re
+import difflib
+import tempfile
+import math
+import warnings
+from shutil import rmtree
+from operator import itemgetter
+from inspect import isgeneratorfunction
+
+from six import PY2, add_metaclass, string_types
+from six.moves import builtins, range, configparser, input
+
+from logilab.common.deprecation import class_deprecated, deprecated
+
+import unittest as unittest_legacy
+if not getattr(unittest_legacy, "__package__", None):
+ try:
+ import unittest2 as unittest
+ from unittest2 import SkipTest
+ except ImportError:
+ raise ImportError("You have to install python-unittest2 to use %s" % __name__)
+else:
+ import unittest as unittest
+ from unittest import SkipTest
+
+from functools import wraps
+
+from logilab.common.debugger import Debugger
+from logilab.common.decorators import cached, classproperty
+from logilab.common import textutils
+
+
+__all__ = ['unittest_main', 'find_tests', 'nocoverage', 'pause_trace']
+
+DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest',
+ 'func', 'validation')
+
+is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction)
+
+# used by unittest to count the number of relevant levels in the traceback
+__unittest = 1
+
+
+@deprecated('with_tempdir is deprecated, use {0}.TemporaryDirectory.'.format(
+ 'tempfile' if not PY2 else 'backports.tempfile'))
+def with_tempdir(callable):
+ """A decorator ensuring no temporary file left when the function return
+ Work only for temporary file created with the tempfile module"""
+ if isgeneratorfunction(callable):
+ def proxy(*args, **kwargs):
+ old_tmpdir = tempfile.gettempdir()
+ new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
+ tempfile.tempdir = new_tmpdir
+ try:
+ for x in callable(*args, **kwargs):
+ yield x
+ finally:
+ try:
+ rmtree(new_tmpdir, ignore_errors=True)
+ finally:
+ tempfile.tempdir = old_tmpdir
+ return proxy
+
+ @wraps(callable)
+ def proxy(*args, **kargs):
+
+ old_tmpdir = tempfile.gettempdir()
+ new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
+ tempfile.tempdir = new_tmpdir
+ try:
+ return callable(*args, **kargs)
+ finally:
+ try:
+ rmtree(new_tmpdir, ignore_errors=True)
+ finally:
+ tempfile.tempdir = old_tmpdir
+ return proxy
+
+def in_tempdir(callable):
+ """A decorator moving the enclosed function inside the tempfile.tempfdir
+ """
+ @wraps(callable)
+ def proxy(*args, **kargs):
+
+ old_cwd = os.getcwd()
+ os.chdir(tempfile.tempdir)
+ try:
+ return callable(*args, **kargs)
+ finally:
+ os.chdir(old_cwd)
+ return proxy
+
+def within_tempdir(callable):
+ """A decorator run the enclosed function inside a tmpdir removed after execution
+ """
+ proxy = with_tempdir(in_tempdir(callable))
+ proxy.__name__ = callable.__name__
+ return proxy
+
+def find_tests(testdir,
+ prefixes=DEFAULT_PREFIXES, suffix=".py",
+ excludes=(),
+ remove_suffix=True):
+ """
+ Return a list of all applicable test modules.
+ """
+ tests = []
+ for name in os.listdir(testdir):
+ if not suffix or name.endswith(suffix):
+ for prefix in prefixes:
+ if name.startswith(prefix):
+ if remove_suffix and name.endswith(suffix):
+ name = name[:-len(suffix)]
+ if name not in excludes:
+ tests.append(name)
+ tests.sort()
+ return tests
+
+
+## PostMortem Debug facilities #####
+def start_interactive_mode(result):
+ """starts an interactive shell so that the user can inspect errors
+ """
+ debuggers = result.debuggers
+ descrs = result.error_descrs + result.fail_descrs
+ if len(debuggers) == 1:
+ # don't ask for test name if there's only one failure
+ debuggers[0].start()
+ else:
+ while True:
+ testindex = 0
+ print("Choose a test to debug:")
+ # order debuggers in the same way than errors were printed
+ print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
+ in enumerate(descrs)]))
+ print("Type 'exit' (or ^D) to quit")
+ print()
+ try:
+ todebug = input('Enter a test name: ')
+ if todebug.strip().lower() == 'exit':
+ print()
+ break
+ else:
+ try:
+ testindex = int(todebug)
+ debugger = debuggers[descrs[testindex][0]]
+ except (ValueError, IndexError):
+ print("ERROR: invalid test number %r" % (todebug, ))
+ else:
+ debugger.start()
+ except (EOFError, KeyboardInterrupt):
+ print()
+ break
+
+
+# coverage pausing tools #####################################################
+
+@contextmanager
+def replace_trace(trace=None):
+ """A context manager that temporary replaces the trace function"""
+ oldtrace = sys.gettrace()
+ sys.settrace(trace)
+ try:
+ yield
+ finally:
+ # specific hack to work around a bug in pycoverage, see
+ # https://bitbucket.org/ned/coveragepy/issue/123
+ if (oldtrace is not None and not callable(oldtrace) and
+ hasattr(oldtrace, 'pytrace')):
+ oldtrace = oldtrace.pytrace
+ sys.settrace(oldtrace)
+
+
+pause_trace = replace_trace
+
+
+def nocoverage(func):
+ """Function decorator that pauses tracing functions"""
+ if hasattr(func, 'uncovered'):
+ return func
+ func.uncovered = True
+
+ def not_covered(*args, **kwargs):
+ with pause_trace():
+ return func(*args, **kwargs)
+ not_covered.uncovered = True
+ return not_covered
+
+
+# test utils ##################################################################
+
+
+# Add deprecation warnings about new api used by module level fixtures in unittest2
+# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule
+class _DebugResult(object): # simplify import statement among unittest flavors..
+ "Used by the TestSuite to hold previous class when running in debug."
+ _previousTestClass = None
+ _moduleSetUpFailed = False
+ shouldStop = False
+
+# backward compatibility: TestSuite might be imported from lgc.testlib
+TestSuite = unittest.TestSuite
+
+class keywords(dict):
+ """Keyword args (**kwargs) support for generative tests."""
+
+class starargs(tuple):
+ """Variable arguments (*args) for generative tests."""
+ def __new__(cls, *args):
+ return tuple.__new__(cls, args)
+
+unittest_main = unittest.main
+
+
+class InnerTestSkipped(SkipTest):
+ """raised when a test is skipped"""
+ pass
+
+def parse_generative_args(params):
+ args = []
+ varargs = ()
+ kwargs = {}
+ flags = 0 # 2 <=> starargs, 4 <=> kwargs
+ for param in params:
+ if isinstance(param, starargs):
+ varargs = param
+ if flags:
+ raise TypeError('found starargs after keywords !')
+ flags |= 2
+ args += list(varargs)
+ elif isinstance(param, keywords):
+ kwargs = param
+ if flags & 4:
+ raise TypeError('got multiple keywords parameters')
+ flags |= 4
+ elif flags & 2 or flags & 4:
+ raise TypeError('found parameters after kwargs or args')
+ else:
+ args.append(param)
+
+ return args, kwargs
+
+
+class InnerTest(tuple):
+ def __new__(cls, name, *data):
+ instance = tuple.__new__(cls, data)
+ instance.name = name
+ return instance
+
+class Tags(set):
+ """A set of tag able validate an expression"""
+
+ def __init__(self, *tags, **kwargs):
+ self.inherit = kwargs.pop('inherit', True)
+ if kwargs:
+ raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys())
+
+ if len(tags) == 1 and not isinstance(tags[0], string_types):
+ tags = tags[0]
+ super(Tags, self).__init__(tags, **kwargs)
+
+ def __getitem__(self, key):
+ return key in self
+
+ def match(self, exp):
+ return eval(exp, {}, self)
+
+ def __or__(self, other):
+ return Tags(*super(Tags, self).__or__(other))
+
+
+# duplicate definition from unittest2 of the _deprecate decorator
+def _deprecate(original_func):
+ def deprecated_func(*args, **kwargs):
+ warnings.warn(
+ ('Please use %s instead.' % original_func.__name__),
+ DeprecationWarning, 2)
+ return original_func(*args, **kwargs)
+ return deprecated_func
+
+class TestCase(unittest.TestCase):
+ """A unittest.TestCase extension with some additional methods."""
+ maxDiff = None
+ tags = Tags()
+
+ def __init__(self, methodName='runTest'):
+ super(TestCase, self).__init__(methodName)
+ self.__exc_info = sys.exc_info
+ self.__testMethodName = self._testMethodName
+ self._current_test_descr = None
+ self._options_ = None
+
+ @classproperty
+ @cached
+ def datadir(cls): # pylint: disable=E0213
+ """helper attribute holding the standard test's data directory
+
+ NOTE: this is a logilab's standard
+ """
+ mod = sys.modules[cls.__module__]
+ return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data')
+ # cache it (use a class method to cache on class since TestCase is
+ # instantiated for each test run)
+
+ @classmethod
+ def datapath(cls, *fname):
+ """joins the object's datadir and `fname`"""
+ return osp.join(cls.datadir, *fname)
+
+ def set_description(self, descr):
+ """sets the current test's description.
+ This can be useful for generative tests because it allows to specify
+ a description per yield
+ """
+ self._current_test_descr = descr
+
+ # override default's unittest.py feature
+ def shortDescription(self):
+ """override default unittest shortDescription to handle correctly
+ generative tests
+ """
+ if self._current_test_descr is not None:
+ return self._current_test_descr
+ return super(TestCase, self).shortDescription()
+
+ def quiet_run(self, result, func, *args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except unittest.SkipTest as e:
+ if hasattr(result, 'addSkip'):
+ result.addSkip(self, str(e))
+ else:
+ warnings.warn("TestResult has no addSkip method, skips not reported",
+ RuntimeWarning, 2)
+ result.addSuccess(self)
+ return False
+ except:
+ result.addError(self, self.__exc_info())
+ return False
+ return True
+
+ def _get_test_method(self):
+ """return the test method"""
+ return getattr(self, self._testMethodName)
+
+ def optval(self, option, default=None):
+ """return the option value or default if the option is not define"""
+ return getattr(self._options_, option, default)
+
+ def __call__(self, result=None, runcondition=None, options=None):
+ """rewrite TestCase.__call__ to support generative tests
+ This is mostly a copy/paste from unittest.py (i.e same
+ variable names, same logic, except for the generative tests part)
+ """
+ if result is None:
+ result = self.defaultTestResult()
+ self._options_ = options
+ # if result.cvg:
+ # result.cvg.start()
+ testMethod = self._get_test_method()
+ if (getattr(self.__class__, "__unittest_skip__", False) or
+ getattr(testMethod, "__unittest_skip__", False)):
+ # If the class or method was skipped.
+ try:
+ skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
+ or getattr(testMethod, '__unittest_skip_why__', ''))
+ if hasattr(result, 'addSkip'):
+ result.addSkip(self, skip_why)
+ else:
+ warnings.warn("TestResult has no addSkip method, skips not reported",
+ RuntimeWarning, 2)
+ result.addSuccess(self)
+ finally:
+ result.stopTest(self)
+ return
+ if runcondition and not runcondition(testMethod):
+ return # test is skipped
+ result.startTest(self)
+ try:
+ if not self.quiet_run(result, self.setUp):
+ return
+ generative = isgeneratorfunction(testMethod)
+ # generative tests
+ if generative:
+ self._proceed_generative(result, testMethod,
+ runcondition)
+ else:
+ status = self._proceed(result, testMethod)
+ success = (status == 0)
+ if not self.quiet_run(result, self.tearDown):
+ return
+ if not generative and success:
+ result.addSuccess(self)
+ finally:
+ # if result.cvg:
+ # result.cvg.stop()
+ result.stopTest(self)
+
+ def _proceed_generative(self, result, testfunc, runcondition=None):
+ # cancel startTest()'s increment
+ result.testsRun -= 1
+ success = True
+ try:
+ for params in testfunc():
+ if runcondition and not runcondition(testfunc,
+ skipgenerator=False):
+ if not (isinstance(params, InnerTest)
+ and runcondition(params)):
+ continue
+ if not isinstance(params, (tuple, list)):
+ params = (params, )
+ func = params[0]
+ args, kwargs = parse_generative_args(params[1:])
+ # increment test counter manually
+ result.testsRun += 1
+ status = self._proceed(result, func, args, kwargs)
+ if status == 0:
+ result.addSuccess(self)
+ success = True
+ else:
+ success = False
+ # XXX Don't stop anymore if an error occured
+ #if status == 2:
+ # result.shouldStop = True
+ if result.shouldStop: # either on error or on exitfirst + error
+ break
+ except self.failureException:
+ result.addFailure(self, self.__exc_info())
+ success = False
+ except SkipTest as e:
+ result.addSkip(self, e)
+ except:
+ # if an error occurs between two yield
+ result.addError(self, self.__exc_info())
+ success = False
+ return success
+
+ def _proceed(self, result, testfunc, args=(), kwargs=None):
+ """proceed the actual test
+ returns 0 on success, 1 on failure, 2 on error
+
+ Note: addSuccess can't be called here because we have to wait
+ for tearDown to be successfully executed to declare the test as
+ successful
+ """
+ kwargs = kwargs or {}
+ try:
+ testfunc(*args, **kwargs)
+ except self.failureException:
+ result.addFailure(self, self.__exc_info())
+ return 1
+ except KeyboardInterrupt:
+ raise
+ except InnerTestSkipped as e:
+ result.addSkip(self, e)
+ return 1
+ except SkipTest as e:
+ result.addSkip(self, e)
+ return 0
+ except:
+ result.addError(self, self.__exc_info())
+ return 2
+ return 0
+
+ def innerSkip(self, msg=None):
+ """mark a generative test as skipped for the reason"""
+ msg = msg or 'test was skipped'
+ raise InnerTestSkipped(msg)
+
+ if sys.version_info >= (3,2):
+ assertItemsEqual = unittest.TestCase.assertCountEqual
+ else:
+ assertCountEqual = unittest.TestCase.assertItemsEqual
+
+TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')(
+ TestCase.assertItemsEqual)
+
+import doctest
+
+class SkippedSuite(unittest.TestSuite):
+ def test(self):
+ """just there to trigger test execution"""
+ self.skipped_test('doctest module has no DocTestSuite class')
+
+
+class DocTestFinder(doctest.DocTestFinder):
+
+ def __init__(self, *args, **kwargs):
+ self.skipped = kwargs.pop('skipped', ())
+ doctest.DocTestFinder.__init__(self, *args, **kwargs)
+
+ def _get_test(self, obj, name, module, globs, source_lines):
+ """override default _get_test method to be able to skip tests
+ according to skipped attribute's value
+ """
+ if getattr(obj, '__name__', '') in self.skipped:
+ return None
+ return doctest.DocTestFinder._get_test(self, obj, name, module,
+ globs, source_lines)
+
+
+@add_metaclass(class_deprecated)
+class DocTest(TestCase):
+ """trigger module doctest
+ I don't know how to make unittest.main consider the DocTestSuite instance
+ without this hack
+ """
+ __deprecation_warning__ = 'use stdlib doctest module with unittest API directly'
+ skipped = ()
+ def __call__(self, result=None, runcondition=None, options=None):\
+ # pylint: disable=W0613
+ try:
+ finder = DocTestFinder(skipped=self.skipped)
+ suite = doctest.DocTestSuite(self.module, test_finder=finder)
+ # XXX iirk
+ doctest.DocTestCase._TestCase__exc_info = sys.exc_info
+ except AttributeError:
+ suite = SkippedSuite()
+ # doctest may gork the builtins dictionnary
+ # This happen to the "_" entry used by gettext
+ old_builtins = builtins.__dict__.copy()
+ try:
+ return suite.run(result)
+ finally:
+ builtins.__dict__.clear()
+ builtins.__dict__.update(old_builtins)
+ run = __call__
+
+ def test(self):
+ """just there to trigger test execution"""
+
+
+class MockConnection:
+ """fake DB-API 2.0 connexion AND cursor (i.e. cursor() return self)"""
+
+ def __init__(self, results):
+ self.received = []
+ self.states = []
+ self.results = results
+
+ def cursor(self):
+ """Mock cursor method"""
+ return self
+ def execute(self, query, args=None):
+ """Mock execute method"""
+ self.received.append( (query, args) )
+ def fetchone(self):
+ """Mock fetchone method"""
+ return self.results[0]
+ def fetchall(self):
+ """Mock fetchall method"""
+ return self.results
+ def commit(self):
+ """Mock commiy method"""
+ self.states.append( ('commit', len(self.received)) )
+ def rollback(self):
+ """Mock rollback method"""
+ self.states.append( ('rollback', len(self.received)) )
+ def close(self):
+ """Mock close method"""
+ pass
+
+
+def mock_object(**params):
+ """creates an object using params to set attributes
+ >>> option = mock_object(verbose=False, index=range(5))
+ >>> option.verbose
+ False
+ >>> option.index
+ [0, 1, 2, 3, 4]
+ """
+ return type('Mock', (), params)()
+
+
+def create_files(paths, chroot):
+ """Creates directories and files found in .
+
+ :param paths: list of relative paths to files or directories
+ :param chroot: the root directory in which paths will be created
+
+ >>> from os.path import isdir, isfile
+ >>> isdir('/tmp/a')
+ False
+ >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp')
+ >>> isdir('/tmp/a')
+ True
+ >>> isdir('/tmp/a/b/c')
+ True
+ >>> isfile('/tmp/a/b/c/d/e.py')
+ True
+ >>> isfile('/tmp/a/b/foo.py')
+ True
+ """
+ dirs, files = set(), set()
+ for path in paths:
+ path = osp.join(chroot, path)
+ filename = osp.basename(path)
+ # path is a directory path
+ if filename == '':
+ dirs.add(path)
+ # path is a filename path
+ else:
+ dirs.add(osp.dirname(path))
+ files.add(path)
+ for dirpath in dirs:
+ if not osp.isdir(dirpath):
+ os.makedirs(dirpath)
+ for filepath in files:
+ open(filepath, 'w').close()
+
+
+class AttrObject: # XXX cf mock_object
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+def tag(*args, **kwargs):
+ """descriptor adding tag to a function"""
+ def desc(func):
+ assert not hasattr(func, 'tags')
+ func.tags = Tags(*args, **kwargs)
+ return func
+ return desc
+
+def require_version(version):
+ """ Compare version of python interpreter to the given one. Skip the test
+ if older.
+ """
+ def check_require_version(f):
+ version_elements = version.split('.')
+ try:
+ compare = tuple([int(v) for v in version_elements])
+ except ValueError:
+ raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
+ current = sys.version_info[:3]
+ if current < compare:
+ def new_f(self, *args, **kwargs):
+ self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current])))
+ new_f.__name__ = f.__name__
+ return new_f
+ else:
+ return f
+ return check_require_version
+
+def require_module(module):
+ """ Check if the given module is loaded. Skip the test if not.
+ """
+ def check_require_module(f):
+ try:
+ __import__(module)
+ return f
+ except ImportError:
+ def new_f(self, *args, **kwargs):
+ self.skipTest('%s can not be imported.' % module)
+ new_f.__name__ = f.__name__
+ return new_f
+ return check_require_module
+
diff --git a/pymode/libs/logilab/common/textutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py
similarity index 99%
rename from pymode/libs/logilab/common/textutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py
index 9046f975..356b1a89 100644
--- a/pymode/libs/logilab/common/textutils.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py
@@ -70,6 +70,8 @@
u'\xf8': u'o', # LATIN SMALL LETTER O WITH STROKE
u'\xbb': u'"', # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xdf': u'ss', # LATIN SMALL LETTER SHARP S
+ u'\u2013': u'-', # HYPHEN
+ u'\u2019': u"'", # SIMPLE QUOTE
}
def unormalize(ustring, ignorenonascii=None, substitute=None):
diff --git a/pymode/libs/logilab/common/tree.py b/pymode/libs/logilab-common-1.4.1/logilab/common/tree.py
similarity index 100%
rename from pymode/libs/logilab/common/tree.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/tree.py
diff --git a/pymode/libs/logilab/common/umessage.py b/pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py
similarity index 64%
rename from pymode/libs/logilab/common/umessage.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py
index a5e47995..a0394bc6 100644
--- a/pymode/libs/logilab/common/umessage.py
+++ b/pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py
@@ -22,15 +22,13 @@
import email
from encodings import search_function
import sys
-if sys.version_info >= (2, 5):
- from email.utils import parseaddr, parsedate
- from email.header import decode_header
-else:
- from email.Utils import parseaddr, parsedate
- from email.Header import decode_header
+from email.utils import parseaddr, parsedate
+from email.header import decode_header
from datetime import datetime
+from six import text_type, binary_type
+
try:
from mx.DateTime import DateTime
except ImportError:
@@ -44,7 +42,14 @@ def decode_QP(string):
for decoded, charset in decode_header(string):
if not charset :
charset = 'iso-8859-15'
- parts.append(decoded.decode(charset, 'replace'))
+ # python 3 sometimes returns str and sometimes bytes.
+ # the 'official' fix is to use the new 'policy' APIs
+ # https://bugs.python.org/issue24797
+ # let's just handle this bug ourselves for now
+ if isinstance(decoded, binary_type):
+ decoded = decoded.decode(charset, 'replace')
+ assert isinstance(decoded, text_type)
+ parts.append(decoded)
if sys.version_info < (3, 3):
# decoding was non-RFC compliant wrt to whitespace handling
@@ -55,13 +60,13 @@ def decode_QP(string):
def message_from_file(fd):
try:
return UMessage(email.message_from_file(fd))
- except email.Errors.MessageParseError:
+ except email.errors.MessageParseError:
return ''
def message_from_string(string):
try:
return UMessage(email.message_from_string(string))
- except email.Errors.MessageParseError:
+ except email.errors.MessageParseError:
return ''
class UMessage:
@@ -96,61 +101,39 @@ def walk(self):
for part in self.message.walk():
yield UMessage(part)
- if sys.version_info < (3, 0):
-
- def get_payload(self, index=None, decode=False):
- message = self.message
- if index is None:
- payload = message.get_payload(index, decode)
- if isinstance(payload, list):
- return [UMessage(msg) for msg in payload]
- if message.get_content_maintype() != 'text':
- return payload
-
- charset = message.get_content_charset() or 'iso-8859-1'
- if search_function(charset) is None:
- charset = 'iso-8859-1'
- return unicode(payload or '', charset, "replace")
- else:
- payload = UMessage(message.get_payload(index, decode))
- return payload
-
- def get_content_maintype(self):
- return unicode(self.message.get_content_maintype())
-
- def get_content_type(self):
- return unicode(self.message.get_content_type())
-
- def get_filename(self, failobj=None):
- value = self.message.get_filename(failobj)
- if value is failobj:
- return value
- try:
- return unicode(value)
- except UnicodeDecodeError:
- return u'error decoding filename'
-
- else:
-
- def get_payload(self, index=None, decode=False):
- message = self.message
- if index is None:
- payload = message.get_payload(index, decode)
- if isinstance(payload, list):
- return [UMessage(msg) for msg in payload]
+ def get_payload(self, index=None, decode=False):
+ message = self.message
+ if index is None:
+ payload = message.get_payload(index, decode)
+ if isinstance(payload, list):
+ return [UMessage(msg) for msg in payload]
+ if message.get_content_maintype() != 'text':
+ return payload
+ if isinstance(payload, text_type):
return payload
- else:
- payload = UMessage(message.get_payload(index, decode))
- return payload
-
- def get_content_maintype(self):
- return self.message.get_content_maintype()
-
- def get_content_type(self):
- return self.message.get_content_type()
- def get_filename(self, failobj=None):
- return self.message.get_filename(failobj)
+ charset = message.get_content_charset() or 'iso-8859-1'
+ if search_function(charset) is None:
+ charset = 'iso-8859-1'
+ return text_type(payload or b'', charset, "replace")
+ else:
+ payload = UMessage(message.get_payload(index, decode))
+ return payload
+
+ def get_content_maintype(self):
+ return text_type(self.message.get_content_maintype())
+
+ def get_content_type(self):
+ return text_type(self.message.get_content_type())
+
+ def get_filename(self, failobj=None):
+ value = self.message.get_filename(failobj)
+ if value is failobj:
+ return value
+ try:
+ return text_type(value)
+ except UnicodeDecodeError:
+ return u'error decoding filename'
# other convenience methods ###############################################
diff --git a/pymode/libs/logilab/common/ureports/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py
similarity index 100%
rename from pymode/libs/logilab/common/ureports/__init__.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py
diff --git a/pymode/libs/logilab/common/ureports/docbook_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py
similarity index 100%
rename from pymode/libs/logilab/common/ureports/docbook_writer.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py
diff --git a/pymode/libs/logilab/common/ureports/html_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py
similarity index 100%
rename from pymode/libs/logilab/common/ureports/html_writer.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py
diff --git a/pymode/libs/logilab/common/ureports/nodes.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py
similarity index 100%
rename from pymode/libs/logilab/common/ureports/nodes.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py
diff --git a/pymode/libs/logilab/common/ureports/text_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py
similarity index 100%
rename from pymode/libs/logilab/common/ureports/text_writer.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py
diff --git a/pymode/libs/logilab/common/urllib2ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py
similarity index 100%
rename from pymode/libs/logilab/common/urllib2ext.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py
diff --git a/pymode/libs/logilab/common/vcgutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py
similarity index 100%
rename from pymode/libs/logilab/common/vcgutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py
diff --git a/pymode/libs/logilab/common/visitor.py b/pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py
similarity index 100%
rename from pymode/libs/logilab/common/visitor.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py
diff --git a/pymode/libs/logilab/common/xmlutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py
similarity index 100%
rename from pymode/libs/logilab/common/xmlutils.py
rename to pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py
diff --git a/pymode/libs/logilab-common-1.4.1/setup.cfg b/pymode/libs/logilab-common-1.4.1/setup.cfg
new file mode 100644
index 00000000..8b48b197
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/setup.cfg
@@ -0,0 +1,9 @@
+[bdist_rpm]
+packager = Sylvain Thenault
+provides = logilab.common
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/pymode/libs/logilab-common-1.4.1/setup.py b/pymode/libs/logilab-common-1.4.1/setup.py
new file mode 100644
index 00000000..c565ee15
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/setup.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# pylint: disable=W0404,W0622,W0704,W0613,W0152
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""Generic Setup script, takes package info from __pkginfo__.py file.
+"""
+__docformat__ = "restructuredtext en"
+
+from setuptools import setup, find_packages
+from io import open
+from os import path
+
+here = path.abspath(path.dirname(__file__))
+
+pkginfo = {}
+with open(path.join(here, '__pkginfo__.py')) as f:
+ exec(f.read(), pkginfo)
+
+# Get the long description from the relevant file
+with open(path.join(here, 'README'), encoding='utf-8') as f:
+ long_description = f.read()
+
+setup(
+ name=pkginfo['distname'],
+ version=pkginfo['version'],
+ description=pkginfo['description'],
+ long_description=long_description,
+ url=pkginfo['web'],
+ author=pkginfo['author'],
+ author_email=pkginfo['author_email'],
+ license=pkginfo['license'],
+ # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=pkginfo['classifiers'],
+ packages=find_packages(exclude=['contrib', 'docs', 'test*']),
+ namespace_packages=[pkginfo['subpackage_of']],
+ install_requires=pkginfo['install_requires'],
+ tests_require=pkginfo['tests_require'],
+ scripts=pkginfo['scripts'],
+)
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog b/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog
new file mode 100644
index 00000000..22a45529
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog
@@ -0,0 +1,184 @@
+ChangeLog for logilab.devtools
+==============================
+
+ --
+ * added the missing dos2unix script to the distribution
+
+ * major debianize refactoring using class / inheritance instead of
+ functions composition
+
+ * import the version control library from oobrother extended with code
+ from devtools / apycot
+
+ * Singing in the rain:
+
+ - I'm
+ - singing in the rain
+
+ * Big change multiline
+ tata titi toto
+
+ - small change
+ - other change
+ - multiline change
+ really ?
+ - Eat your vegetable and brush after every meals
+
+
+
+2004-02-13 -- 0.4.5
+ * fix debianize to handle dependencies to python standalone package
+ (ie no "python" prefix in the default package)
+
+ * fixed cvslog in rlog mode
+
+
+
+2004-02-11 -- 0.4.4
+ * check web and ftp variables from __pkginfo__
+
+ * check for long and short descriptions in __pkginfo__
+
+ * outdated copyright is now a warning
+
+ * consider distuils automaticaly install .c files
+
+ * fix check_package exit status
+
+ * merged sgml, elisp and data packages in generated debian files
+
+
+
+2003-12-05 -- 0.4.3
+ * fix bug in buildeb making it usable from buildpackage...
+
+
+
+2003-11-24 -- 0.4.2
+ * fixed pb with check_info_module and catalog, when not launched from the
+ package directory
+
+ * ignore build directory in check_manifest
+
+ * fix to avoid pb with "non executed" docstring in pycoverage
+
+ * add support for --help and fix exit status to pycoverage
+
+
+
+2003-11-20 -- 0.4.1
+ * added code coverage tool, starting from
+ http://www.garethrees.org/2001/12/04/python-coverage/
+
+ * added --help option to buildeb
+
+
+
+2003-11-14 -- 0.4.0
+ * added a python script buildeb to build debian package (buildpackage call
+ this script now)
+
+ * debianize now puts tests in a separated package (-test) and generate
+ package for zope >= 2.6.2 (i.e. python 2.2)
+
+ * fix detection of examples directory in pkginfo
+
+ * fix debhelper dependency in build-depends
+
+ * remove minor bug in buildpackage (try to move archive.gz instead of
+ archive.tar.gz
+
+ * bug fix in debianize zope handler
+
+
+
+2003-10-06 -- 0.3.4
+ * remove important bug in buildpackage (rm sourcetree when building a
+ source distrib)
+
+ * add version to dependency between main packages and sub-packages (-data,
+ -elisp and -sgml)
+
+ * change way of creating the .orig.tar.gz
+
+ * create source distribution when building debian package
+
+ * fix path in log message for MANIFEST.in, __pkginfo__ and bin directory
+
+ * make changelog more robust
+
+ * debianize bug fixes
+
+
+
+2003-09-22 -- 0.3.3
+ * fix python.postinst script to avoid compiling of others packages :)
+
+
+
+2003-09-19 -- 0.3.2
+ * add basic support for XSLT distribution
+
+ * fix DTD and catalog handling in debianize
+
+ * fix bug in check_pkginfo
+
+ * updated documentation
+
+
+
+2003-09-18 -- 0.3.1
+ * add support for data files in debianize
+
+ * test python version in debianize
+
+ * minor fixes
+
+ * updated setup.py template
+
+
+
+2003-09-18 -- 0.3.0
+ * updates for a new packaging standard
+
+ * removed jabbercli, cvs_filecheck
+
+ * added preparedistrib, tagpackage, pkginfo
+
+ * simpler debianize relying on a generic setup.py
+
+ * fix some debian templates
+
+ * checkpackage rewrite
+
+ * provides checkers for the tester package
+
+
+
+2003-08-29 -- 0.2.4
+ * added cvs_filecheck
+
+
+
+2003-06-20 -- 0.2.2
+ * buildpackages fixes
+
+
+
+2003-06-17 -- 0.2.1
+ * fix setup.py
+
+ * make pkghandlers.export working with python <= 2.1
+
+ * add the mailinglist variable in __pkginfo__, used for announce
+ generation in makedistrib
+
+
+
+2003-06-16 -- 0.2.0
+ * minor enhancements
+
+ * get package information for __pkginfo__.py
+
+
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip b/pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip
new file mode 100644
index 0000000000000000000000000000000000000000..a7b3125f999ac1f8e8bd5b5260d3de1eeb840fec
GIT binary patch
literal 206
zcmWIWW@h1H00CCVN~>2f#aSXiHV6wb$S~wq7E~4_>c_`t=4F<|$LkeThK6u5Fh|`u
z=?TK672FJrEZ>Ko&DF0`ZbY
d5DS}qtPuOq>.
+"""logilab.common packaging information"""
+__docformat__ = "restructuredtext en"
+import sys
+import os
+
+distname = 'logilab-common'
+modname = 'common'
+subpackage_of = 'logilab'
+subpackage_master = True
+
+numversion = (0, 63, 2)
+version = '.'.join([str(num) for num in numversion])
+
+license = 'LGPL' # 2.1 or later
+description = "collection of low-level Python packages and modules used by Logilab projects"
+web = "http://www.logilab.org/project/%s" % distname
+mailinglist = "mailto://python-projects@lists.logilab.org"
+author = "Logilab"
+author_email = "contact@logilab.fr"
+
+
+from os.path import join
+scripts = [join('bin', 'logilab-pytest')]
+include_dirs = [join('test', 'data')]
+
+install_requires = [
+ 'six >= 1.4.0',
+ ]
+tests_require = ['pytz']
+
+if sys.version_info < (2, 7):
+ install_requires.append('unittest2 >= 0.5.1')
+if os.name == 'nt':
+ install_requires.append('colorama')
+
+classifiers = ["Topic :: Utilities",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 3",
+ ]
diff --git a/pymode/libs/astroid/interpreter/_import/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/NOTHING
similarity index 100%
rename from pymode/libs/astroid/interpreter/_import/__init__.py
rename to pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/NOTHING
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README
new file mode 100644
index 00000000..27ab0b99
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README
@@ -0,0 +1 @@
+thank you
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin
new file mode 100644
index 00000000..0e46b314
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin
@@ -0,0 +1 @@
+baba
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt
new file mode 100644
index 00000000..785a58b9
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt
@@ -0,0 +1,53 @@
+Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui
+quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut
+facta, aut impediente autem totum unum directam eius tum voluptate
+sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob
+vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non,
+atomis videatur in ut, mihi litteris si ante vivere, deinde
+emancipaverat appetendum sine erant ex metu philosophiae fatemur, et
+magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere
+imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat,
+eademque vivendum, contra similique.
+
+Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate
+inertissimae sunt, corpora denique fabulis dicere ab et quae ad
+politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus
+videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram
+conflixisse nec inpotenti, et bonum ad nostris servare omni, saepe
+multis, consequantur id, in fructuosam multi quod, voluptatem abducat
+a tantum sit error ipso si respirare corrupte referuntur, maiorem..
+Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse
+conscientia tamen conclusionemque notionem, amentur quam, praeclarorum
+eum consulatu iis invitat solum porro, quidem ad patria, fore res
+athenis sempiternum alii venire, est mei nam improbis dolorem,
+permulta timidiores.
+
+Et inquam sic familias, sequatur animis quae et quae ea esse, autem
+impediri quaeque modo inciderint consecutionem expectata, sed severa
+etiamsi, in egregios temporibus infinito ad artibus, voluptatem
+aristotele, tandem aliquo industriae collegi timiditatem sibi igitur
+aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus
+fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se
+petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla,
+et quod sed.
+
+Verissimum confirmat accurate totam iisque sequitur aut probabo et et
+adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et
+libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem,
+sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque
+facilius vita invitat, delectus excepturi ex libidinum non qua
+consequi beate quae ratio.. Illa poetis videor requirere, quippiam et
+autem ut et esset voluptate neque consilia sed voluptatibus est
+virtutum minima et, interesse exquirere et peccandi quae carere se,
+angere.. Firme nomine oratio perferendis si voluptates cogitavisse,
+feci maledici ea vis et, nam quae legantur animum animis temeritate,
+amicitiam desideraturam tollatur nisi de voluptatem.
+
+Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis
+detractis reprehensiones nostrum sola non tantopere perfruique quoque
+fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae,
+labore.. Omnia si quaerimus, si praetermissum vero deserunt quia
+democriti retinere ignoratione, iam de gerendarum vel a maxime
+provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo
+quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa
+susceperant legant consiliisque, est sed quantum igitur.
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py b/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py
new file mode 100644
index 00000000..be3b1031
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py
@@ -0,0 +1,4 @@
+# placeholder used by unittest_deprecation
+
+def moving_target():
+ pass
diff --git a/pymode/libs/pylint/extensions/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/NOTHING
similarity index 100%
rename from pymode/libs/pylint/extensions/__init__.py
rename to pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/NOTHING
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README
new file mode 100644
index 00000000..27ab0b99
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README
@@ -0,0 +1 @@
+thank you
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt
new file mode 100644
index 00000000..4bf7233a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt
@@ -0,0 +1,53 @@
+Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui
+quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut
+facta, aut impediente autem totum unum directam eius tum voluptate
+sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob
+vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non,
+atomis videatur in ut, mihi litteris si ante vivere, deinde
+emancipaverat appetendum sine erant ex metu philosophiae fatemur, et
+magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere
+imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat,
+eademque vivendum, contra similique.
+
+Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate
+inertissimae sunt, corpora denique fabulis dicere ab et quae ad
+politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus
+videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram
+pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo
+rerum constringendos ex pondere lucilius essent neglexerit insequitur
+a tantum sit error ipso si respirare corrupte referuntur, maiorem..
+Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse
+conscientia tamen conclusionemque notionem, amentur quam, praeclarorum
+eum consulatu iis invitat solum porro, quidem ad patria, fore res
+athenis sempiternum alii venire, est mei nam improbis dolorem,
+permulta timidiores.
+
+Et inquam sic familias, sequatur animis quae et quae ea esse, autem
+impediri quaeque modo inciderint consecutionem expectata, sed severa
+etiamsi, in egregios temporibus infinito ad artibus, voluptatem
+aristotele, tandem aliquo industriae collegi timiditatem sibi igitur
+aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus
+fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se
+petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla,
+et quod sed.
+
+Verissimum confirmat accurate totam iisque sequitur aut probabo et et
+adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et
+libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem,
+sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque
+facilius vita invitat, delectus excepturi ex libidinum non qua
+consequi beate quae ratio.. Illa poetis videor requirere, quippiam et
+autem ut et esset voluptate neque consilia sed voluptatibus est
+virtutum minima et, interesse exquirere et peccandi quae carere se,
+angere.. Firme nomine oratio perferendis si voluptates cogitavisse,
+feci maledici ea vis et, nam quae legantur animum animis temeritate,
+amicitiam desideraturam tollatur nisi de voluptatem.
+
+Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis
+detractis reprehensiones nostrum sola non tantopere perfruique quoque
+fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae,
+labore.. Omnia si quaerimus, si praetermissum vero deserunt quia
+democriti retinere ignoratione, iam de gerendarum vel a maxime
+provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo
+quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa
+susceperant legant consiliisque, est sed quantum igitur.
diff --git a/pymode/libs/rope/base/oi/type_hinting/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello
similarity index 100%
rename from pymode/libs/rope/base/oi/type_hinting/__init__.py
rename to pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello
diff --git a/pymode/libs/rope/base/oi/type_hinting/providers/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/__init__.py
similarity index 100%
rename from pymode/libs/rope/base/oi/type_hinting/providers/__init__.py
rename to pymode/libs/logilab-common-1.4.1/test/data/find_test/__init__.py
diff --git a/pymode/libs/rope/base/oi/type_hinting/resolvers/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/foo.txt
similarity index 100%
rename from pymode/libs/rope/base/oi/type_hinting/resolvers/__init__.py
rename to pymode/libs/logilab-common-1.4.1/test/data/find_test/foo.txt
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/foo.txt b/pymode/libs/logilab-common-1.4.1/test/data/foo.txt
new file mode 100644
index 00000000..a08c29e4
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/foo.txt
@@ -0,0 +1,9 @@
+a
+b
+c
+d
+e
+f
+g
+h
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py
new file mode 100644
index 00000000..74b26b82
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py
@@ -0,0 +1,2 @@
+# force a "direct" python import
+from . import foo
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py
new file mode 100644
index 00000000..8f7de1e8
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py
@@ -0,0 +1,6 @@
+import sys
+if not getattr(sys, 'bar', None):
+ sys.just_once = []
+# there used to be two numbers here because
+# of a load_module_from_path bug
+sys.just_once.append(42)
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/module.py b/pymode/libs/logilab-common-1.4.1/test/data/module.py
new file mode 100644
index 00000000..493e6762
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/module.py
@@ -0,0 +1,69 @@
+# -*- coding: Latin-1 -*-
+"""test module for astng
+"""
+from __future__ import print_function
+
+from logilab.common import modutils, Execute as spawn
+from logilab.common.astutils import *
+import os.path
+
+MY_DICT = {}
+
+
+def global_access(key, val):
+ """function test"""
+ local = 1
+ MY_DICT[key] = val
+ for i in val:
+ if i:
+ del MY_DICT[i]
+ continue
+ else:
+ break
+ else:
+ print('!!!')
+
+class YO:
+ """hehe"""
+ a=1
+ def __init__(self):
+ try:
+ self.yo = 1
+ except ValueError as ex:
+ pass
+ except (NameError, TypeError):
+ raise XXXError()
+ except:
+ raise
+
+#print('*****>',YO.__dict__)
+class YOUPI(YO):
+ class_attr = None
+
+ def __init__(self):
+ self.member = None
+
+ def method(self):
+ """method test"""
+ global MY_DICT
+ try:
+ MY_DICT = {}
+ local = None
+ autre = [a for a, b in MY_DICT if b]
+ if b in autre:
+ print('yo', end=' ')
+ elif a in autre:
+ print('hehe')
+ global_access(local, val=autre)
+ finally:
+ return local
+
+ def static_method():
+ """static method test"""
+ assert MY_DICT, '???'
+ static_method = staticmethod(static_method)
+
+ def class_method(cls):
+ """class method test"""
+ exec(a, b)
+ class_method = classmethod(class_method)
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/module2.py b/pymode/libs/logilab-common-1.4.1/test/data/module2.py
new file mode 100644
index 00000000..51509f3b
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/module2.py
@@ -0,0 +1,77 @@
+from data.module import YO, YOUPI
+import data
+
+class Specialization(YOUPI, YO): pass
+
+class Metaclass(type): pass
+
+class Interface: pass
+
+class MyIFace(Interface): pass
+
+class AnotherIFace(Interface): pass
+
+class MyException(Exception): pass
+class MyError(MyException): pass
+
+class AbstractClass(object):
+
+ def to_override(self, whatever):
+ raise NotImplementedError()
+
+ def return_something(self, param):
+ if param:
+ return 'toto'
+ return
+
+class Concrete0:
+ __implements__ = MyIFace
+class Concrete1:
+ __implements__ = MyIFace, AnotherIFace
+class Concrete2:
+ __implements__ = (MyIFace,
+ AnotherIFace)
+class Concrete23(Concrete1): pass
+
+del YO.member
+
+del YO
+[SYN1, SYN2] = Concrete0, Concrete1
+assert '1'
+b = 1 | 2 & 3 ^ 8
+exec('c = 3')
+exec('c = 3', {}, {})
+
+def raise_string(a=2, *args, **kwargs):
+ raise 'pas glop'
+ raise Exception('yo')
+ yield 'coucou'
+
+a = b + 2
+c = b * 2
+c = b / 2
+c = b // 2
+c = b - 2
+c = b % 2
+c = b ** 2
+c = b << 2
+c = b >> 2
+c = ~b
+
+c = not b
+
+d = [c]
+e = d[:]
+e = d[a:b:c]
+
+raise_string(*args, **kwargs)
+
+print >> stream, 'bonjour'
+print >> stream, 'salut',
+
+
+def make_class(any, base=data.module.YO, *args, **kwargs):
+ """check base is correctly resolved to Concrete0"""
+ class Aaaa(base):
+ """dynamic class"""
+ return Aaaa
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt b/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt
new file mode 100644
index 00000000..e1f25c09
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt
@@ -0,0 +1,3 @@
+# mixed new lines
+1
+2
3
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py b/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py
new file mode 100644
index 00000000..110f902d
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py
@@ -0,0 +1,36 @@
+from __future__ import print_function
+
+import unittest
+
+
+class TestCase(unittest.TestCase):
+
+ def setUp(self):
+ unittest.TestCase.setUp(self)
+
+
+ def tearDown(self):
+ unittest.TestCase.tearDown(self)
+
+ def testIt(self):
+ self.a = 10
+ self.xxx()
+
+
+ def xxx(self):
+ if False:
+ pass
+ print('a')
+
+ if False:
+ pass
+ pass
+
+ if False:
+ pass
+ print('rara')
+
+
+if __name__ == '__main__':
+ print('test2')
+ unittest.main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py b/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py
new file mode 100644
index 00000000..a4b5ef7d
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py
@@ -0,0 +1,16 @@
+from __future__ import print_function
+
+try:
+ enumerate = enumerate
+except NameError:
+
+ def enumerate(iterable):
+ """emulates the python2.3 enumerate() function"""
+ i = 0
+ for val in iterable:
+ yield i, val
+ i += 1
+
+def toto(value):
+ for k, v in value:
+ print(v.get('yo'))
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README
new file mode 100644
index 00000000..27ab0b99
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README
@@ -0,0 +1 @@
+thank you
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin
new file mode 100644
index 00000000..0e46b314
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin
@@ -0,0 +1 @@
+baba
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt
new file mode 100644
index 00000000..4bf7233a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt
@@ -0,0 +1,53 @@
+Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui
+quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut
+facta, aut impediente autem totum unum directam eius tum voluptate
+sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob
+vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non,
+atomis videatur in ut, mihi litteris si ante vivere, deinde
+emancipaverat appetendum sine erant ex metu philosophiae fatemur, et
+magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere
+imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat,
+eademque vivendum, contra similique.
+
+Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate
+inertissimae sunt, corpora denique fabulis dicere ab et quae ad
+politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus
+videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram
+pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo
+rerum constringendos ex pondere lucilius essent neglexerit insequitur
+a tantum sit error ipso si respirare corrupte referuntur, maiorem..
+Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse
+conscientia tamen conclusionemque notionem, amentur quam, praeclarorum
+eum consulatu iis invitat solum porro, quidem ad patria, fore res
+athenis sempiternum alii venire, est mei nam improbis dolorem,
+permulta timidiores.
+
+Et inquam sic familias, sequatur animis quae et quae ea esse, autem
+impediri quaeque modo inciderint consecutionem expectata, sed severa
+etiamsi, in egregios temporibus infinito ad artibus, voluptatem
+aristotele, tandem aliquo industriae collegi timiditatem sibi igitur
+aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus
+fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se
+petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla,
+et quod sed.
+
+Verissimum confirmat accurate totam iisque sequitur aut probabo et et
+adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et
+libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem,
+sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque
+facilius vita invitat, delectus excepturi ex libidinum non qua
+consequi beate quae ratio.. Illa poetis videor requirere, quippiam et
+autem ut et esset voluptate neque consilia sed voluptatibus est
+virtutum minima et, interesse exquirere et peccandi quae carere se,
+angere.. Firme nomine oratio perferendis si voluptates cogitavisse,
+feci maledici ea vis et, nam quae legantur animum animis temeritate,
+amicitiam desideraturam tollatur nisi de voluptatem.
+
+Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis
+detractis reprehensiones nostrum sola non tantopere perfruique quoque
+fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae,
+labore.. Omnia si quaerimus, si praetermissum vero deserunt quia
+democriti retinere ignoratione, iam de gerendarum vel a maxime
+provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo
+quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa
+susceperant legant consiliisque, est sed quantum igitur.
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py b/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py
new file mode 100644
index 00000000..6cea558b
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py
@@ -0,0 +1,22 @@
+"""unittest_registry data file"""
+from logilab.common.registry import yes, RegistrableObject, RegistrableInstance
+
+class Proxy(object):
+ """annoying object should that not be registered, nor cause error"""
+ def __getattr__(self, attr):
+ return 1
+
+trap = Proxy()
+
+class AppObjectClass(RegistrableObject):
+ __registry__ = 'zereg'
+ __regid__ = 'appobject1'
+ __select__ = yes()
+
+class AppObjectInstance(RegistrableInstance):
+ __registry__ = 'zereg'
+ __select__ = yes()
+ def __init__(self, regid):
+ self.__regid__ = regid
+
+appobject2 = AppObjectInstance('appobject2')
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py b/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py
new file mode 100644
index 00000000..091b9f7d
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py
@@ -0,0 +1,8 @@
+from logilab.common.registry import RegistrableObject, RegistrableInstance, yes
+
+class MyRegistrableInstance(RegistrableInstance):
+ __regid__ = 'appobject3'
+ __select__ = yes()
+ __registry__ = 'zereg'
+
+instance = MyRegistrableInstance(__module__=__name__)
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README
new file mode 100644
index 00000000..27ab0b99
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README
@@ -0,0 +1 @@
+thank you
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin
new file mode 100644
index 00000000..0e46b314
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin
@@ -0,0 +1 @@
+baba
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt
new file mode 100644
index 00000000..4bf7233a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt
@@ -0,0 +1,53 @@
+Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui
+quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut
+facta, aut impediente autem totum unum directam eius tum voluptate
+sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob
+vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non,
+atomis videatur in ut, mihi litteris si ante vivere, deinde
+emancipaverat appetendum sine erant ex metu philosophiae fatemur, et
+magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere
+imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat,
+eademque vivendum, contra similique.
+
+Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate
+inertissimae sunt, corpora denique fabulis dicere ab et quae ad
+politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus
+videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram
+pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo
+rerum constringendos ex pondere lucilius essent neglexerit insequitur
+a tantum sit error ipso si respirare corrupte referuntur, maiorem..
+Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse
+conscientia tamen conclusionemque notionem, amentur quam, praeclarorum
+eum consulatu iis invitat solum porro, quidem ad patria, fore res
+athenis sempiternum alii venire, est mei nam improbis dolorem,
+permulta timidiores.
+
+Et inquam sic familias, sequatur animis quae et quae ea esse, autem
+impediri quaeque modo inciderint consecutionem expectata, sed severa
+etiamsi, in egregios temporibus infinito ad artibus, voluptatem
+aristotele, tandem aliquo industriae collegi timiditatem sibi igitur
+aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus
+fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se
+petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla,
+et quod sed.
+
+Verissimum confirmat accurate totam iisque sequitur aut probabo et et
+adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et
+libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem,
+sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque
+facilius vita invitat, delectus excepturi ex libidinum non qua
+consequi beate quae ratio.. Illa poetis videor requirere, quippiam et
+autem ut et esset voluptate neque consilia sed voluptatibus est
+virtutum minima et, interesse exquirere et peccandi quae carere se,
+angere.. Firme nomine oratio perferendis si voluptates cogitavisse,
+feci maledici ea vis et, nam quae legantur animum animis temeritate,
+amicitiam desideraturam tollatur nisi de voluptatem.
+
+Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis
+detractis reprehensiones nostrum sola non tantopere perfruique quoque
+fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae,
+labore.. Omnia si quaerimus, si praetermissum vero deserunt quia
+democriti retinere ignoratione, iam de gerendarum vel a maxime
+provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo
+quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa
+susceperant legant consiliisque, est sed quantum igitur.
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/spam.txt b/pymode/libs/logilab-common-1.4.1/test/data/spam.txt
new file mode 100644
index 00000000..068911b1
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/spam.txt
@@ -0,0 +1,9 @@
+a
+b
+c
+h
+e
+f
+g
+h
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt b/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt
new file mode 100644
index 00000000..c60eb160
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt
@@ -0,0 +1 @@
+hhh
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py b/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py
new file mode 100644
index 00000000..746b5d04
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py
@@ -0,0 +1,3 @@
+from __future__ import print_function
+
+print('yo')
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README
new file mode 100644
index 00000000..27ab0b99
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README
@@ -0,0 +1 @@
+thank you
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin
new file mode 100644
index 00000000..0e46b314
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin
@@ -0,0 +1 @@
+baba
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt
new file mode 100644
index 00000000..4bf7233a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt
@@ -0,0 +1,53 @@
+Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui
+quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut
+facta, aut impediente autem totum unum directam eius tum voluptate
+sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob
+vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non,
+atomis videatur in ut, mihi litteris si ante vivere, deinde
+emancipaverat appetendum sine erant ex metu philosophiae fatemur, et
+magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere
+imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat,
+eademque vivendum, contra similique.
+
+Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate
+inertissimae sunt, corpora denique fabulis dicere ab et quae ad
+politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus
+videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram
+pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo
+rerum constringendos ex pondere lucilius essent neglexerit insequitur
+a tantum sit error ipso si respirare corrupte referuntur, maiorem..
+Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse
+conscientia tamen conclusionemque notionem, amentur quam, praeclarorum
+eum consulatu iis invitat solum porro, quidem ad patria, fore res
+athenis sempiternum alii venire, est mei nam improbis dolorem,
+permulta timidiores.
+
+Et inquam sic familias, sequatur animis quae et quae ea esse, autem
+impediri quaeque modo inciderint consecutionem expectata, sed severa
+etiamsi, in egregios temporibus infinito ad artibus, voluptatem
+aristotele, tandem aliquo industriae collegi timiditatem sibi igitur
+aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus
+fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se
+petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla,
+et quod sed.
+
+Verissimum confirmat accurate totam iisque sequitur aut probabo et et
+adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et
+libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem,
+sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque
+facilius vita invitat, delectus excepturi ex libidinum non qua
+consequi beate quae ratio.. Illa poetis videor requirere, quippiam et
+autem ut et esset voluptate neque consilia sed voluptatibus est
+virtutum minima et, interesse exquirere et peccandi quae carere se,
+angere.. Firme nomine oratio perferendis si voluptates cogitavisse,
+feci maledici ea vis et, nam quae legantur animum animis temeritate,
+amicitiam desideraturam tollatur nisi de voluptatem.
+
+Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis
+detractis reprehensiones nostrum sola non tantopere perfruique quoque
+fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae,
+labore.. Omnia si quaerimus, si praetermissum vero deserunt quia
+democriti retinere ignoratione, iam de gerendarum vel a maxime
+provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo
+quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa
+susceperant legant consiliisque, est sed quantum igitur.
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test.ini b/pymode/libs/logilab-common-1.4.1/test/data/test.ini
new file mode 100644
index 00000000..3785702c
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/test.ini
@@ -0,0 +1,20 @@
+# test configuration
+[TEST]
+
+dothis=yes
+
+value=' '
+
+# you can also document the option
+multiple=yop
+
+number=2
+
+#choice
+renamed=yo
+
+multiple-choice=yo,ye
+
+
+[OLD]
+named=key:val
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test1.msg b/pymode/libs/logilab-common-1.4.1/test/data/test1.msg
new file mode 100644
index 00000000..33b75c83
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/test1.msg
@@ -0,0 +1,30 @@
+From Nicolas.Chauvat@logilab.fr Wed Jul 20 12:03:06 2005
+Return-Path:
+X-Original-To: nico@logilab.fr
+Delivered-To: nico@logilab.fr
+Received: from logilab.fr (crater.logilab.fr [172.17.1.4])
+ by orion.logilab.fr (Postfix) with SMTP id 7D3412BDA6
+ for ; Wed, 20 Jul 2005 12:03:06 +0200 (CEST)
+Received: (nullmailer pid 8382 invoked by uid 1000);
+ Wed, 20 Jul 2005 10:03:20 -0000
+Date: Wed, 20 Jul 2005 12:03:20 +0200
+From: Nicolas Chauvat
+To: Nicolas Chauvat
+Subject: autre message
+Message-ID: <20050720100320.GA8371@logilab.fr>
+Mime-Version: 1.0
+Content-Type: text/plain; charset=utf-8
+Content-Disposition: inline
+Content-Transfer-Encoding: 8bit
+User-Agent: Mutt/1.5.9i
+X-Spambayes-Classification: ham; 0.01
+Content-Length: 106
+Lines: 6
+
+bonjour
+
+--
+Nicolas Chauvat
+
+logilab.fr - services en informatique avancée et gestion de connaissances
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test2.msg b/pymode/libs/logilab-common-1.4.1/test/data/test2.msg
new file mode 100644
index 00000000..3a5ca812
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/data/test2.msg
@@ -0,0 +1,42 @@
+From alexandre.fayolle@logilab.fr Wed Jul 27 11:21:57 2005
+Date: Wed, 27 Jul 2005 11:21:57 +0200
+From: Alexandre =?iso-8859-1?Q?'d=E9couvreur?= de bugs' Fayolle
+To: =?iso-8859-1?B?6WzpbWVudCDg?= accents
+Subject: =?iso-8859-1?Q?=C0?= LA MER
+Message-ID: <20050727092157.GB3923@logilab.fr>
+Mime-Version: 1.0
+Content-Type: multipart/signed; micalg=pgp-sha1;
+ protocol="application/pgp-signature"; boundary="wULyF7TL5taEdwHz"
+Content-Disposition: inline
+User-Agent: Mutt/1.5.9i
+Status: RO
+Content-Length: 692
+Lines: 26
+
+
+--wULyF7TL5taEdwHz
+Content-Type: text/plain; charset=iso-8859-1
+Content-Disposition: inline
+Content-Transfer-Encoding: quoted-printable
+
+il s'est pass=E9 de dr=F4les de choses.=20
+
+--=20
+Alexandre Fayolle LOGILAB, Paris (France).
+http://www.logilab.com http://www.logilab.fr http://www.logilab.org
+
+--wULyF7TL5taEdwHz
+Content-Type: application/pgp-signature; name="signature.asc"
+Content-Description: Digital signature
+Content-Disposition: inline
+
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.1 (GNU/Linux)
+
+iD8DBQFC51I1Ll/b4N9npV4RAsaLAJ4k9C8Hnrjg+Q3ocrUYnYppTVcgyQCeO8yT
+B7AM5XzlRD1lYqlxq+h80K8=
+=zfVV
+-----END PGP SIGNATURE-----
+
+--wULyF7TL5taEdwHz--
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py b/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py
new file mode 100644
index 00000000..459f1720
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py
@@ -0,0 +1,129 @@
+# unit tests for the cache module
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+
+from logilab.common.testlib import TestCase, unittest_main, TestSuite
+from logilab.common.cache import Cache
+
+class CacheTestCase(TestCase):
+
+ def setUp(self):
+ self.cache = Cache(5)
+ self.testdict = {}
+
+ def test_setitem1(self):
+ """Checks that the setitem method works"""
+ self.cache[1] = 'foo'
+ self.assertEqual(self.cache[1], 'foo', "1:foo is not in cache")
+ self.assertEqual(len(self.cache._usage), 1)
+ self.assertEqual(self.cache._usage[-1], 1,
+ '1 is not the most recently used key')
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys(),
+ "usage list and data keys are different")
+
+ def test_setitem2(self):
+ """Checks that the setitem method works for multiple items"""
+ self.cache[1] = 'foo'
+ self.cache[2] = 'bar'
+ self.assertEqual(self.cache[2], 'bar',
+ "2 : 'bar' is not in cache.data")
+ self.assertEqual(len(self.cache._usage), 2,
+ "lenght of usage list is not 2")
+ self.assertEqual(self.cache._usage[-1], 2,
+ '1 is not the most recently used key')
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys())# usage list and data keys are different
+
+ def test_setitem3(self):
+ """Checks that the setitem method works when replacing an element in the cache"""
+ self.cache[1] = 'foo'
+ self.cache[1] = 'bar'
+ self.assertEqual(self.cache[1], 'bar', "1 : 'bar' is not in cache.data")
+ self.assertEqual(len(self.cache._usage), 1, "lenght of usage list is not 1")
+ self.assertEqual(self.cache._usage[-1], 1, '1 is not the most recently used key')
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys())# usage list and data keys are different
+
+ def test_recycling1(self):
+ """Checks the removal of old elements"""
+ self.cache[1] = 'foo'
+ self.cache[2] = 'bar'
+ self.cache[3] = 'baz'
+ self.cache[4] = 'foz'
+ self.cache[5] = 'fuz'
+ self.cache[6] = 'spam'
+ self.assertTrue(1 not in self.cache,
+ 'key 1 has not been suppressed from the cache dictionnary')
+ self.assertTrue(1 not in self.cache._usage,
+ 'key 1 has not been suppressed from the cache LRU list')
+ self.assertEqual(len(self.cache._usage), 5, "lenght of usage list is not 5")
+ self.assertEqual(self.cache._usage[-1], 6, '6 is not the most recently used key')
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys())# usage list and data keys are different
+
+ def test_recycling2(self):
+ """Checks that accessed elements get in the front of the list"""
+ self.cache[1] = 'foo'
+ self.cache[2] = 'bar'
+ self.cache[3] = 'baz'
+ self.cache[4] = 'foz'
+ a = self.cache[1]
+ self.assertEqual(a, 'foo')
+ self.assertEqual(self.cache._usage[-1], 1, '1 is not the most recently used key')
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys())# usage list and data keys are different
+
+ def test_delitem(self):
+ """Checks that elements are removed from both element dict and element
+ list.
+ """
+ self.cache['foo'] = 'bar'
+ del self.cache['foo']
+ self.assertTrue('foo' not in self.cache.keys(), "Element 'foo' was not removed cache dictionnary")
+ self.assertTrue('foo' not in self.cache._usage, "Element 'foo' was not removed usage list")
+ self.assertCountEqual(self.cache._usage,
+ self.cache.keys())# usage list and data keys are different
+
+
+ def test_nullsize(self):
+ """Checks that a 'NULL' size cache doesn't store anything
+ """
+ null_cache = Cache(0)
+ null_cache['foo'] = 'bar'
+ self.assertEqual(null_cache.size, 0, 'Cache size should be O, not %d' % \
+ null_cache.size)
+ self.assertEqual(len(null_cache), 0, 'Cache should be empty !')
+ # Assert null_cache['foo'] raises a KeyError
+ self.assertRaises(KeyError, null_cache.__getitem__, 'foo')
+ # Deleting element raises a KeyError
+ self.assertRaises(KeyError, null_cache.__delitem__, 'foo')
+
+ def test_getitem(self):
+ """ Checks that getitem doest not modify the _usage attribute
+ """
+ try:
+ self.cache['toto']
+ except KeyError:
+ self.assertTrue('toto' not in self.cache._usage)
+ else:
+ self.fail('excepted KeyError')
+
+
+if __name__ == "__main__":
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py b/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py
new file mode 100644
index 00000000..c2572d70
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py
@@ -0,0 +1,40 @@
+# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with logilab-common. If not, see .
+
+from os.path import join, dirname
+
+from io import StringIO
+from logilab.common.testlib import TestCase, unittest_main
+
+from logilab.common.changelog import ChangeLog
+
+
+class ChangeLogTC(TestCase):
+ cl_class = ChangeLog
+ cl_file = join(dirname(__file__), 'data', 'ChangeLog')
+
+ def test_round_trip(self):
+ cl = self.cl_class(self.cl_file)
+ out = StringIO()
+ cl.write(out)
+ with open(self.cl_file) as stream:
+ self.assertMultiLineEqual(stream.read(), out.getvalue())
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py b/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py
new file mode 100644
index 00000000..ea7cdca6
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py
@@ -0,0 +1,509 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+import tempfile
+import os
+from os.path import join, dirname, abspath
+import re
+
+from sys import version_info
+
+from six import integer_types
+
+from logilab.common import attrdict
+from logilab.common.compat import StringIO
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.optik_ext import OptionValueError
+from logilab.common.configuration import Configuration, OptionError, \
+ OptionsManagerMixIn, OptionsProviderMixIn, Method, read_old_config, \
+ merge_options
+
+DATA = join(dirname(abspath(__file__)), 'data')
+
+OPTIONS = [('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}),
+ ('value', {'type': 'string', 'metavar': '', 'short': 'v'}),
+ ('multiple', {'type': 'csv', 'default': ['yop', 'yep'],
+ 'metavar': '',
+ 'help': 'you can also document the option'}),
+ ('number', {'type': 'int', 'default':2, 'metavar':'', 'help': 'boom'}),
+ ('bytes', {'type': 'bytes', 'default':'1KB', 'metavar':''}),
+ ('choice', {'type': 'choice', 'default':'yo', 'choices': ('yo', 'ye'),
+ 'metavar':''}),
+ ('multiple-choice', {'type': 'multiple_choice', 'default':['yo', 'ye'],
+ 'choices': ('yo', 'ye', 'yu', 'yi', 'ya'),
+ 'metavar':''}),
+ ('named', {'type':'named', 'default':Method('get_named'),
+ 'metavar': ''}),
+
+ ('diffgroup', {'type':'string', 'default':'pouet', 'metavar': '',
+ 'group': 'agroup'}),
+ ('reset-value', {'type': 'string', 'metavar': '', 'short': 'r',
+ 'dest':'value'}),
+
+ ('opt-b-1', {'type': 'string', 'metavar': '', 'group': 'bgroup'}),
+ ('opt-b-2', {'type': 'string', 'metavar': '', 'group': 'bgroup'}),
+ ]
+
+class MyConfiguration(Configuration):
+ """test configuration"""
+ def get_named(self):
+ return {'key': 'val'}
+
+class ConfigurationTC(TestCase):
+
+ def setUp(self):
+ self.cfg = MyConfiguration(name='test', options=OPTIONS, usage='Just do it ! (tm)')
+
+ def test_default(self):
+ cfg = self.cfg
+ self.assertEqual(cfg['dothis'], True)
+ self.assertEqual(cfg['value'], None)
+ self.assertEqual(cfg['multiple'], ['yop', 'yep'])
+ self.assertEqual(cfg['number'], 2)
+ self.assertEqual(cfg['bytes'], 1024)
+ self.assertIsInstance(cfg['bytes'], integer_types)
+ self.assertEqual(cfg['choice'], 'yo')
+ self.assertEqual(cfg['multiple-choice'], ['yo', 'ye'])
+ self.assertEqual(cfg['named'], {'key': 'val'})
+
+ def test_base(self):
+ cfg = self.cfg
+ cfg.set_option('number', '0')
+ self.assertEqual(cfg['number'], 0)
+ self.assertRaises(OptionValueError, cfg.set_option, 'number', 'youpi')
+ self.assertRaises(OptionValueError, cfg.set_option, 'choice', 'youpi')
+ self.assertRaises(OptionValueError, cfg.set_option, 'multiple-choice', ('yo', 'y', 'ya'))
+ cfg.set_option('multiple-choice', 'yo, ya')
+ self.assertEqual(cfg['multiple-choice'], ['yo', 'ya'])
+ self.assertEqual(cfg.get('multiple-choice'), ['yo', 'ya'])
+ self.assertEqual(cfg.get('whatever'), None)
+
+ def test_load_command_line_configuration(self):
+ cfg = self.cfg
+ args = cfg.load_command_line_configuration(['--choice', 'ye', '--number', '4',
+ '--multiple=1,2,3', '--dothis=n',
+ '--bytes=10KB',
+ 'other', 'arguments'])
+ self.assertEqual(args, ['other', 'arguments'])
+ self.assertEqual(cfg['dothis'], False)
+ self.assertEqual(cfg['multiple'], ['1', '2', '3'])
+ self.assertEqual(cfg['number'], 4)
+ self.assertEqual(cfg['bytes'], 10240)
+ self.assertEqual(cfg['choice'], 'ye')
+ self.assertEqual(cfg['value'], None)
+ args = cfg.load_command_line_configuration(['-v', 'duh'])
+ self.assertEqual(args, [])
+ self.assertEqual(cfg['value'], 'duh')
+ self.assertEqual(cfg['dothis'], False)
+ self.assertEqual(cfg['multiple'], ['1', '2', '3'])
+ self.assertEqual(cfg['number'], 4)
+ self.assertEqual(cfg['bytes'], 10240)
+ self.assertEqual(cfg['choice'], 'ye')
+
+ def test_load_configuration(self):
+ cfg = self.cfg
+ args = cfg.load_configuration(choice='ye', number='4',
+ multiple='1,2,3', dothis='n',
+ multiple_choice=('yo', 'ya'))
+ self.assertEqual(cfg['dothis'], False)
+ self.assertEqual(cfg['multiple'], ['1', '2', '3'])
+ self.assertEqual(cfg['number'], 4)
+ self.assertEqual(cfg['choice'], 'ye')
+ self.assertEqual(cfg['value'], None)
+ self.assertEqual(cfg['multiple-choice'], ('yo', 'ya'))
+
+ def test_load_configuration_file_case_insensitive(self):
+ file = tempfile.mktemp()
+ stream = open(file, 'w')
+ try:
+ stream.write("""[Test]
+
+dothis=no
+
+#value=
+
+# you can also document the option
+multiple=yop,yepii
+
+# boom
+number=3
+
+bytes=1KB
+
+choice=yo
+
+multiple-choice=yo,ye
+
+named=key:val
+
+
+[agroup]
+
+diffgroup=zou
+""")
+ stream.close()
+ self.cfg.load_file_configuration(file)
+ self.assertEqual(self.cfg['dothis'], False)
+ self.assertEqual(self.cfg['value'], None)
+ self.assertEqual(self.cfg['multiple'], ['yop', 'yepii'])
+ self.assertEqual(self.cfg['diffgroup'], 'zou')
+ finally:
+ os.remove(file)
+
+ def test_option_order(self):
+ """ Check that options are taken into account in the command line order
+ and not in the order they are defined in the Configuration object.
+ """
+ file = tempfile.mktemp()
+ stream = open(file, 'w')
+ try:
+ stream.write("""[Test]
+reset-value=toto
+value=tata
+""")
+ stream.close()
+ self.cfg.load_file_configuration(file)
+ finally:
+ os.remove(file)
+ self.assertEqual(self.cfg['value'], 'tata')
+
+ def test_unsupported_options(self):
+ file = tempfile.mktemp()
+ stream = open(file, 'w')
+ try:
+ stream.write("""[Test]
+whatever=toto
+value=tata
+""")
+ stream.close()
+ self.cfg.load_file_configuration(file)
+ finally:
+ os.remove(file)
+ self.assertEqual(self.cfg['value'], 'tata')
+ self.assertRaises(OptionError, self.cfg.__getitem__, 'whatever')
+
+ def test_generate_config(self):
+ stream = StringIO()
+ self.cfg.generate_config(stream)
+ self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST]
+
+dothis=yes
+
+#value=
+
+# you can also document the option
+multiple=yop,yep
+
+# boom
+number=2
+
+bytes=1KB
+
+choice=yo
+
+multiple-choice=yo,ye
+
+named=key:val
+
+#reset-value=
+
+
+[AGROUP]
+
+diffgroup=pouet
+
+
+[BGROUP]
+
+#opt-b-1=
+
+#opt-b-2=""")
+
+ def test_generate_config_with_space_string(self):
+ self.cfg['value'] = ' '
+ stream = StringIO()
+ self.cfg.generate_config(stream)
+ self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST]
+
+dothis=yes
+
+value=' '
+
+# you can also document the option
+multiple=yop,yep
+
+# boom
+number=2
+
+bytes=1KB
+
+choice=yo
+
+multiple-choice=yo,ye
+
+named=key:val
+
+reset-value=' '
+
+
+[AGROUP]
+
+diffgroup=pouet
+
+
+[BGROUP]
+
+#opt-b-1=
+
+#opt-b-2=""")
+
+ def test_generate_config_with_multiline_string(self):
+ self.cfg['value'] = 'line1\nline2\nline3'
+ stream = StringIO()
+ self.cfg.generate_config(stream)
+ self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST]
+
+dothis=yes
+
+value=
+ line1
+ line2
+ line3
+
+# you can also document the option
+multiple=yop,yep
+
+# boom
+number=2
+
+bytes=1KB
+
+choice=yo
+
+multiple-choice=yo,ye
+
+named=key:val
+
+reset-value=
+ line1
+ line2
+ line3
+
+
+[AGROUP]
+
+diffgroup=pouet
+
+
+[BGROUP]
+
+#opt-b-1=
+
+#opt-b-2=""")
+
+
+ def test_roundtrip(self):
+ cfg = self.cfg
+ f = tempfile.mktemp()
+ stream = open(f, 'w')
+ try:
+ self.cfg['dothis'] = False
+ self.cfg['multiple'] = ["toto", "tata"]
+ self.cfg['number'] = 3
+ self.cfg['bytes'] = 2048
+ cfg.generate_config(stream)
+ stream.close()
+ new_cfg = MyConfiguration(name='test', options=OPTIONS)
+ new_cfg.load_file_configuration(f)
+ self.assertEqual(cfg['dothis'], new_cfg['dothis'])
+ self.assertEqual(cfg['multiple'], new_cfg['multiple'])
+ self.assertEqual(cfg['number'], new_cfg['number'])
+ self.assertEqual(cfg['bytes'], new_cfg['bytes'])
+ self.assertEqual(cfg['choice'], new_cfg['choice'])
+ self.assertEqual(cfg['value'], new_cfg['value'])
+ self.assertEqual(cfg['multiple-choice'], new_cfg['multiple-choice'])
+ finally:
+ os.remove(f)
+
+ def test_setitem(self):
+ self.assertRaises(OptionValueError,
+ self.cfg.__setitem__, 'multiple-choice', ('a', 'b'))
+ self.cfg['multiple-choice'] = ('yi', 'ya')
+ self.assertEqual(self.cfg['multiple-choice'], ('yi', 'ya'))
+
+ def test_help(self):
+ self.cfg.add_help_section('bonus', 'a nice additional help')
+ help = self.cfg.help().strip()
+ # at least in python 2.4.2 the output is:
+ # ' -v , --value='
+ # it is not unlikely some optik/optparse versions do print -v
+ # so accept both
+ help = help.replace(' -v , ', ' -v, ')
+ help = re.sub('[ ]*(\r?\n)', '\\1', help)
+ USAGE = """Usage: Just do it ! (tm)
+
+Options:
+ -h, --help show this help message and exit
+ --dothis=
+ -v, --value=
+ --multiple=
+ you can also document the option [current: yop,yep]
+ --number= boom [current: 2]
+ --bytes=
+ --choice=
+ --multiple-choice=
+ --named=
+ -r , --reset-value=
+
+ Agroup:
+ --diffgroup=
+
+ Bgroup:
+ --opt-b-1=
+ --opt-b-2=
+
+ Bonus:
+ a nice additional help"""
+ if version_info < (2, 5):
+ # 'usage' header is not capitalized in this version
+ USAGE = USAGE.replace('Usage: ', 'usage: ')
+ elif version_info < (2, 4):
+ USAGE = """usage: Just do it ! (tm)
+
+options:
+ -h, --help show this help message and exit
+ --dothis=
+ -v, --value=
+ --multiple=
+ you can also document the option
+ --number=
+ --choice=
+ --multiple-choice=
+ --named=
+
+ Bonus:
+ a nice additional help
+"""
+ self.assertMultiLineEqual(help, USAGE)
+
+
+ def test_manpage(self):
+ pkginfo = {}
+ with open(join(DATA, '__pkginfo__.py')) as fobj:
+ exec(fobj.read(), pkginfo)
+ self.cfg.generate_manpage(attrdict(pkginfo), stream=StringIO())
+
+ def test_rewrite_config(self):
+ changes = [('renamed', 'renamed', 'choice'),
+ ('moved', 'named', 'old', 'test'),
+ ]
+ read_old_config(self.cfg, changes, join(DATA, 'test.ini'))
+ stream = StringIO()
+ self.cfg.generate_config(stream)
+ self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST]
+
+dothis=yes
+
+value=' '
+
+# you can also document the option
+multiple=yop
+
+# boom
+number=2
+
+bytes=1KB
+
+choice=yo
+
+multiple-choice=yo,ye
+
+named=key:val
+
+reset-value=' '
+
+
+[AGROUP]
+
+diffgroup=pouet
+
+
+[BGROUP]
+
+#opt-b-1=
+
+#opt-b-2=""")
+
+class Linter(OptionsManagerMixIn, OptionsProviderMixIn):
+ options = (
+ ('profile', {'type' : 'yn', 'metavar' : '',
+ 'default': False,
+ 'help' : 'Profiled execution.'}),
+ )
+ def __init__(self):
+ OptionsManagerMixIn.__init__(self, usage="")
+ OptionsProviderMixIn.__init__(self)
+ self.register_options_provider(self)
+ self.load_provider_defaults()
+
+class RegrTC(TestCase):
+
+ def setUp(self):
+ self.linter = Linter()
+
+ def test_load_defaults(self):
+ self.linter.load_command_line_configuration([])
+ self.assertEqual(self.linter.config.profile, False)
+
+ def test_register_options_multiple_groups(self):
+ """ensure multiple option groups can be registered at once"""
+ config = Configuration()
+ self.assertEqual(config.options, ())
+ new_options = (
+ ('option1', {'type': 'string', 'help': '',
+ 'group': 'g1', 'level': 2}),
+ ('option2', {'type': 'string', 'help': '',
+ 'group': 'g1', 'level': 2}),
+ ('option3', {'type': 'string', 'help': '',
+ 'group': 'g2', 'level': 2}),
+ )
+ config.register_options(new_options)
+ self.assertEqual(config.options, new_options)
+
+
+class MergeTC(TestCase):
+
+ def test_merge1(self):
+ merged = merge_options([('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}),
+ ('dothis', {'type':'yn', 'action': 'store', 'default': False, 'metavar': ''}),
+ ])
+ self.assertEqual(len(merged), 1)
+ self.assertEqual(merged[0][0], 'dothis')
+ self.assertEqual(merged[0][1]['default'], True)
+
+ def test_merge2(self):
+ merged = merge_options([('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}),
+ ('value', {'type': 'string', 'metavar': '', 'short': 'v'}),
+ ('dothis', {'type':'yn', 'action': 'store', 'default': False, 'metavar': ''}),
+ ])
+ self.assertEqual(len(merged), 2)
+ self.assertEqual(merged[0][0], 'value')
+ self.assertEqual(merged[1][0], 'dothis')
+ self.assertEqual(merged[1][1]['default'], True)
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_date.py b/pymode/libs/logilab-common-1.4.1/test/unittest_date.py
new file mode 100644
index 00000000..9ae444bb
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_date.py
@@ -0,0 +1,206 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""
+Unittests for date helpers
+"""
+from logilab.common.testlib import TestCase, unittest_main, tag
+
+from logilab.common.date import (date_range, endOfMonth, add_days_worked,
+ nb_open_days, get_national_holidays, ustrftime, ticks2datetime,
+ utcdatetime, datetime2ticks)
+
+from datetime import date, datetime, timedelta
+from calendar import timegm
+import pytz
+
+try:
+ from mx.DateTime import Date as mxDate, DateTime as mxDateTime, \
+ now as mxNow, RelativeDateTime, RelativeDate
+except ImportError:
+ mxDate = mxDateTime = RelativeDateTime = mxNow = None
+
+class DateTC(TestCase):
+ datecls = date
+ datetimecls = datetime
+ timedeltacls = timedelta
+ now = datetime.now
+
+ def test_day(self):
+ """enumerate days"""
+ r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 4)))
+ expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 2), self.datecls(2000, 1, 3)]
+ self.assertListEqual(r, expected)
+ r = list(date_range(self.datecls(2000, 1, 31), self.datecls(2000, 2, 3)))
+ expected = [self.datecls(2000, 1, 31), self.datecls(2000, 2, 1), self.datecls(2000, 2, 2)]
+ self.assertListEqual(r, expected)
+ r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 6), 2))
+ expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 3), self.datecls(2000, 1, 5)]
+ self.assertListEqual(r, expected)
+
+ def test_add_days_worked(self):
+ add = add_days_worked
+ # normal
+ self.assertEqual(add(self.datecls(2008, 1, 3), 1), self.datecls(2008, 1, 4))
+ # skip week-end
+ self.assertEqual(add(self.datecls(2008, 1, 3), 2), self.datecls(2008, 1, 7))
+ # skip 2 week-ends
+ self.assertEqual(add(self.datecls(2008, 1, 3), 8), self.datecls(2008, 1, 15))
+ # skip holiday + week-end
+ self.assertEqual(add(self.datecls(2008, 4, 30), 2), self.datecls(2008, 5, 5))
+
+ def test_get_national_holidays(self):
+ holidays = get_national_holidays
+ yield self.assertEqual, holidays(self.datecls(2008, 4, 29), self.datecls(2008, 5, 2)), \
+ [self.datecls(2008, 5, 1)]
+ yield self.assertEqual, holidays(self.datecls(2008, 5, 7), self.datecls(2008, 5, 8)), []
+ x = self.datetimecls(2008, 5, 7, 12, 12, 12)
+ yield self.assertEqual, holidays(x, x + self.timedeltacls(days=1)), []
+
+ def test_open_days_now_and_before(self):
+ nb = nb_open_days
+ x = self.now()
+ y = x - self.timedeltacls(seconds=1)
+ self.assertRaises(AssertionError, nb, x, y)
+
+ def assertOpenDays(self, start, stop, expected):
+ got = nb_open_days(start, stop)
+ self.assertEqual(got, expected)
+
+ def test_open_days_tuesday_friday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 7), 3)
+
+ def test_open_days_day_nextday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 5), 1)
+
+ def test_open_days_friday_monday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 10), 1)
+
+ def test_open_days_friday_monday_with_two_weekends(self):
+ self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 17), 6)
+
+ def test_open_days_tuesday_wednesday(self):
+ """week-end + easter monday"""
+ self.assertOpenDays(self.datecls(2008, 3, 18), self.datecls(2008, 3, 26), 5)
+
+ def test_open_days_friday_saturday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 8), 1)
+
+ def test_open_days_friday_sunday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 9), 1)
+
+ def test_open_days_saturday_sunday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 9), 0)
+
+ def test_open_days_saturday_monday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 10), 0)
+
+ def test_open_days_saturday_tuesday(self):
+ self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 11), 1)
+
+ def test_open_days_now_now(self):
+ x = self.now()
+ self.assertOpenDays(x, x, 0)
+
+ def test_open_days_now_now2(self):
+ x = self.datetimecls(2010, 5, 24)
+ self.assertOpenDays(x, x, 0)
+
+ def test_open_days_afternoon_before_holiday(self):
+ self.assertOpenDays(self.datetimecls(2008, 5, 7, 14), self.datetimecls(2008, 5, 8, 0), 1)
+
+ def test_open_days_afternoon_before_saturday(self):
+ self.assertOpenDays(self.datetimecls(2008, 5, 9, 14), self.datetimecls(2008, 5, 10, 14), 1)
+
+ def test_open_days_afternoon(self):
+ self.assertOpenDays(self.datetimecls(2008, 5, 6, 14), self.datetimecls(2008, 5, 7, 14), 1)
+
+ @tag('posix', '1900')
+ def test_ustrftime_before_1900(self):
+ date = self.datetimecls(1328, 3, 12, 6, 30)
+ self.assertEqual(ustrftime(date, '%Y-%m-%d %H:%M:%S'), u'1328-03-12 06:30:00')
+
+ @tag('posix', '1900')
+ def test_ticks2datetime_before_1900(self):
+ ticks = -2209075200000
+ date = ticks2datetime(ticks)
+ self.assertEqual(ustrftime(date, '%Y-%m-%d'), u'1899-12-31')
+
+ def test_month(self):
+ """enumerate months"""
+ r = list(date_range(self.datecls(2006, 5, 6), self.datecls(2006, 8, 27),
+ incmonth=True))
+ expected = [self.datecls(2006, 5, 6), self.datecls(2006, 6, 1), self.datecls(2006, 7, 1), self.datecls(2006, 8, 1)]
+ self.assertListEqual(expected, r)
+
+ def test_utcdatetime(self):
+ if self.datetimecls is mxDateTime:
+ return
+ d = self.datetimecls(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc)
+ d = utcdatetime(d)
+ self.assertEqual(d, self.datetimecls(2014, 11, 26, 12, 0, 0, 57))
+ self.assertIsNone(d.tzinfo)
+
+ d = pytz.timezone('Europe/Paris').localize(
+ self.datetimecls(2014, 11, 26, 12, 0, 0, 57))
+ d = utcdatetime(d)
+ self.assertEqual(d, self.datetimecls(2014, 11, 26, 11, 0, 0, 57))
+ self.assertIsNone(d.tzinfo)
+
+ d = pytz.timezone('Europe/Paris').localize(
+ self.datetimecls(2014, 7, 26, 12, 0, 0, 57))
+ d = utcdatetime(d)
+ self.assertEqual(d, self.datetimecls(2014, 7, 26, 10, 0, 0, 57))
+ self.assertIsNone(d.tzinfo)
+
+ def test_datetime2ticks(self):
+ d = datetime(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc)
+ timestamp = timegm(d.timetuple())
+ self.assertEqual(datetime2ticks(d), timestamp * 1000)
+ d = d.replace(microsecond=123456)
+ self.assertEqual(datetime2ticks(d), timestamp * 1000 + 123)
+
+ def test_datetime2ticks_date_argument(self):
+ d = date(2014, 11, 26)
+ timestamp = timegm(d.timetuple())
+ self.assertEqual(datetime2ticks(d), timestamp * 1000)
+
+
+class MxDateTC(DateTC):
+ datecls = mxDate
+ datetimecls = mxDateTime
+ timedeltacls = RelativeDateTime
+ now = mxNow
+
+ def check_mx(self):
+ if mxDate is None:
+ self.skipTest('mx.DateTime is not installed')
+
+ def setUp(self):
+ self.check_mx()
+
+ def test_month(self):
+ """enumerate months"""
+ r = list(date_range(self.datecls(2000, 1, 2), self.datecls(2000, 4, 4), endOfMonth))
+ expected = [self.datecls(2000, 1, 2), self.datecls(2000, 2, 29), self.datecls(2000, 3, 31)]
+ self.assertListEqual(r, expected)
+ r = list(date_range(self.datecls(2000, 11, 30), self.datecls(2001, 2, 3), endOfMonth))
+ expected = [self.datecls(2000, 11, 30), self.datecls(2000, 12, 31), self.datecls(2001, 1, 31)]
+ self.assertListEqual(r, expected)
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py b/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py
new file mode 100644
index 00000000..e97a56f2
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py
@@ -0,0 +1,208 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""unit tests for the decorators module
+"""
+import sys
+import types
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.decorators import (monkeypatch, cached, clear_cache,
+ copy_cache, cachedproperty)
+
+class DecoratorsTC(TestCase):
+
+ def test_monkeypatch_instance_method(self):
+ class MyClass: pass
+ @monkeypatch(MyClass)
+ def meth1(self):
+ return 12
+ class XXX(object):
+ @monkeypatch(MyClass)
+ def meth2(self):
+ return 12
+ if sys.version_info < (3, 0):
+ self.assertIsInstance(MyClass.meth1, types.MethodType)
+ self.assertIsInstance(MyClass.meth2, types.MethodType)
+ else:
+ # with python3, unbound method are functions
+ self.assertIsInstance(MyClass.meth1, types.FunctionType)
+ self.assertIsInstance(MyClass.meth2, types.FunctionType)
+ self.assertEqual(MyClass().meth1(), 12)
+ self.assertEqual(MyClass().meth2(), 12)
+
+ def test_monkeypatch_property(self):
+ class MyClass: pass
+ @monkeypatch(MyClass, methodname='prop1')
+ @property
+ def meth1(self):
+ return 12
+ self.assertIsInstance(MyClass.prop1, property)
+ self.assertEqual(MyClass().prop1, 12)
+
+ def test_monkeypatch_arbitrary_callable(self):
+ class MyClass: pass
+ class ArbitraryCallable(object):
+ def __call__(self):
+ return 12
+ # ensure it complains about missing __name__
+ with self.assertRaises(AttributeError) as cm:
+ monkeypatch(MyClass)(ArbitraryCallable())
+ self.assertTrue(str(cm.exception).endswith('has no __name__ attribute: you should provide an explicit `methodname`'))
+ # ensure no black magic under the hood
+ monkeypatch(MyClass, 'foo')(ArbitraryCallable())
+ self.assertTrue(callable(MyClass.foo))
+ self.assertEqual(MyClass().foo(), 12)
+
+ def test_monkeypatch_with_same_name(self):
+ class MyClass: pass
+ @monkeypatch(MyClass)
+ def meth1(self):
+ return 12
+ self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'],
+ ['meth1'])
+ inst = MyClass()
+ self.assertEqual(inst.meth1(), 12)
+
+ def test_monkeypatch_with_custom_name(self):
+ class MyClass: pass
+ @monkeypatch(MyClass, 'foo')
+ def meth2(self, param):
+ return param + 12
+ self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'],
+ ['foo'])
+ inst = MyClass()
+ self.assertEqual(inst.foo(4), 16)
+
+ def test_cannot_cache_generator(self):
+ def foo():
+ yield 42
+ self.assertRaises(AssertionError, cached, foo)
+
+ def test_cached_preserves_docstrings_and_name(self):
+ class Foo(object):
+ @cached
+ def foo(self):
+ """ what's up doc ? """
+ def bar(self, zogzog):
+ """ what's up doc ? """
+ bar = cached(bar, 1)
+ @cached
+ def quux(self, zogzog):
+ """ what's up doc ? """
+ self.assertEqual(Foo.foo.__doc__, """ what's up doc ? """)
+ self.assertEqual(Foo.foo.__name__, 'foo')
+ self.assertEqual(Foo.bar.__doc__, """ what's up doc ? """)
+ self.assertEqual(Foo.bar.__name__, 'bar')
+ self.assertEqual(Foo.quux.__doc__, """ what's up doc ? """)
+ self.assertEqual(Foo.quux.__name__, 'quux')
+
+ def test_cached_single_cache(self):
+ class Foo(object):
+ @cached(cacheattr=u'_foo')
+ def foo(self):
+ """ what's up doc ? """
+ foo = Foo()
+ foo.foo()
+ self.assertTrue(hasattr(foo, '_foo'))
+ clear_cache(foo, 'foo')
+ self.assertFalse(hasattr(foo, '_foo'))
+
+ def test_cached_multi_cache(self):
+ class Foo(object):
+ @cached(cacheattr=u'_foo')
+ def foo(self, args):
+ """ what's up doc ? """
+ foo = Foo()
+ foo.foo(1)
+ self.assertEqual(foo._foo, {(1,): None})
+ clear_cache(foo, 'foo')
+ self.assertFalse(hasattr(foo, '_foo'))
+
+ def test_cached_keyarg_cache(self):
+ class Foo(object):
+ @cached(cacheattr=u'_foo', keyarg=1)
+ def foo(self, other, args):
+ """ what's up doc ? """
+ foo = Foo()
+ foo.foo(2, 1)
+ self.assertEqual(foo._foo, {2: None})
+ clear_cache(foo, 'foo')
+ self.assertFalse(hasattr(foo, '_foo'))
+
+ def test_cached_property(self):
+ class Foo(object):
+ @property
+ @cached(cacheattr=u'_foo')
+ def foo(self):
+ """ what's up doc ? """
+ foo = Foo()
+ foo.foo
+ self.assertEqual(foo._foo, None)
+ clear_cache(foo, 'foo')
+ self.assertFalse(hasattr(foo, '_foo'))
+
+ def test_copy_cache(self):
+ class Foo(object):
+ @cached(cacheattr=u'_foo')
+ def foo(self, args):
+ """ what's up doc ? """
+ foo = Foo()
+ foo.foo(1)
+ self.assertEqual(foo._foo, {(1,): None})
+ foo2 = Foo()
+ self.assertFalse(hasattr(foo2, '_foo'))
+ copy_cache(foo2, 'foo', foo)
+ self.assertEqual(foo2._foo, {(1,): None})
+
+
+ def test_cachedproperty(self):
+ class Foo(object):
+ x = 0
+ @cachedproperty
+ def bar(self):
+ self.__class__.x += 1
+ return self.__class__.x
+ @cachedproperty
+ def quux(self):
+ """ some prop """
+ return 42
+
+ foo = Foo()
+ self.assertEqual(Foo.x, 0)
+ self.assertFalse('bar' in foo.__dict__)
+ self.assertEqual(foo.bar, 1)
+ self.assertTrue('bar' in foo.__dict__)
+ self.assertEqual(foo.bar, 1)
+ self.assertEqual(foo.quux, 42)
+ self.assertEqual(Foo.bar.__doc__,
+ '')
+ self.assertEqual(Foo.quux.__doc__,
+ '\n some prop ')
+
+ foo2 = Foo()
+ self.assertEqual(foo2.bar, 2)
+ # make sure foo.foo is cached
+ self.assertEqual(foo.bar, 1)
+
+ class Kallable(object):
+ def __call__(self):
+ return 42
+ self.assertRaises(TypeError, cachedproperty, Kallable())
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py b/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py
new file mode 100644
index 00000000..b0f8a1aa
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py
@@ -0,0 +1,147 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""unit tests for logilab.common.deprecation"""
+
+import warnings
+
+from six import add_metaclass
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common import deprecation
+
+
+class RawInputTC(TestCase):
+
+ # XXX with 2.6 we could test warnings
+ # http://docs.python.org/library/warnings.html#testing-warnings
+ # instead we just make sure it does not crash
+
+ def mock_warn(self, *args, **kwargs):
+ self.messages.append(args[0])
+
+ def setUp(self):
+ self.messages = []
+ deprecation.warn = self.mock_warn
+
+ def tearDown(self):
+ deprecation.warn = warnings.warn
+
+ def mk_func(self):
+ def any_func():
+ pass
+ return any_func
+
+ def test_class_deprecated(self):
+ @add_metaclass(deprecation.class_deprecated)
+ class AnyClass(object):
+ pass
+ AnyClass()
+ self.assertEqual(self.messages,
+ ['AnyClass is deprecated'])
+
+ def test_deprecated_func(self):
+ any_func = deprecation.deprecated()(self.mk_func())
+ any_func()
+ any_func = deprecation.deprecated('message')(self.mk_func())
+ any_func()
+ self.assertEqual(self.messages,
+ ['The function "any_func" is deprecated', 'message'])
+
+ def test_deprecated_decorator(self):
+ @deprecation.deprecated()
+ def any_func():
+ pass
+ any_func()
+ @deprecation.deprecated('message')
+ def any_func():
+ pass
+ any_func()
+ self.assertEqual(self.messages,
+ ['The function "any_func" is deprecated', 'message'])
+
+ def test_moved(self):
+ module = 'data.deprecation'
+ any_func = deprecation.moved(module, 'moving_target')
+ any_func()
+ self.assertEqual(self.messages,
+ ['object moving_target has been moved to module data.deprecation'])
+
+ def test_deprecated_manager(self):
+ deprecator = deprecation.DeprecationManager("module_name")
+ deprecator.compatibility('1.3')
+ # This warn should be printed.
+ deprecator.warn('1.1', "Major deprecation message.", 1)
+ deprecator.warn('1.1')
+
+ @deprecator.deprecated('1.2', 'Major deprecation message.')
+ def any_func():
+ pass
+ any_func()
+
+ @deprecator.deprecated('1.2')
+ def other_func():
+ pass
+ other_func()
+
+ self.assertListEqual(self.messages,
+ ['[module_name 1.1] Major deprecation message.',
+ '[module_name 1.1] ',
+ '[module_name 1.2] Major deprecation message.',
+ '[module_name 1.2] The function "other_func" is deprecated'])
+
+ def test_class_deprecated_manager(self):
+ deprecator = deprecation.DeprecationManager("module_name")
+ deprecator.compatibility('1.3')
+ @add_metaclass(deprecator.class_deprecated('1.2'))
+ class AnyClass(object):
+ pass
+ AnyClass()
+ self.assertEqual(self.messages,
+ ['[module_name 1.2] AnyClass is deprecated'])
+
+
+ def test_deprecated_manager_noprint(self):
+ deprecator = deprecation.DeprecationManager("module_name")
+ deprecator.compatibility('1.3')
+ # This warn should not be printed.
+ deprecator.warn('1.3', "Minor deprecation message.", 1)
+
+ @deprecator.deprecated('1.3', 'Minor deprecation message.')
+ def any_func():
+ pass
+ any_func()
+
+ @deprecator.deprecated('1.20')
+ def other_func():
+ pass
+ other_func()
+
+ @deprecator.deprecated('1.4')
+ def other_func():
+ pass
+ other_func()
+
+ class AnyClass(object):
+ __metaclass__ = deprecator.class_deprecated((1,5))
+ AnyClass()
+
+ self.assertFalse(self.messages)
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py
new file mode 100644
index 00000000..555e73f4
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py
@@ -0,0 +1,146 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+"""unit tests for logilab.common.fileutils"""
+
+import doctest
+import io
+import sys, os, tempfile, shutil
+from stat import S_IWRITE
+from os.path import join
+
+from logilab.common.testlib import TestCase, unittest_main, unittest
+
+from logilab.common.fileutils import *
+
+DATA_DIR = join(os.path.abspath(os.path.dirname(__file__)), 'data')
+NEWLINES_TXT = join(DATA_DIR, 'newlines.txt')
+
+
+class FirstleveldirectoryTC(TestCase):
+
+ def test_known_values_first_level_directory(self):
+ """return the first level directory of a path"""
+ self.assertEqual(first_level_directory('truc/bidule/chouette'), 'truc', None)
+ self.assertEqual(first_level_directory('/truc/bidule/chouette'), '/', None)
+
+class IsBinaryTC(TestCase):
+ def test(self):
+ self.assertEqual(is_binary('toto.txt'), 0)
+ #self.assertEqual(is_binary('toto.xml'), 0)
+ self.assertEqual(is_binary('toto.bin'), 1)
+ self.assertEqual(is_binary('toto.sxi'), 1)
+ self.assertEqual(is_binary('toto.whatever'), 1)
+
+class GetModeTC(TestCase):
+ def test(self):
+ self.assertEqual(write_open_mode('toto.txt'), 'w')
+ #self.assertEqual(write_open_mode('toto.xml'), 'w')
+ self.assertEqual(write_open_mode('toto.bin'), 'wb')
+ self.assertEqual(write_open_mode('toto.sxi'), 'wb')
+
+class NormReadTC(TestCase):
+ def test_known_values_norm_read(self):
+ with io.open(NEWLINES_TXT) as f:
+ data = f.read()
+ self.assertEqual(data.strip(), '\n'.join(['# mixed new lines', '1', '2', '3']))
+
+
+class LinesTC(TestCase):
+ def test_known_values_lines(self):
+ self.assertEqual(lines(NEWLINES_TXT),
+ ['# mixed new lines', '1', '2', '3'])
+
+ def test_known_values_lines_comment(self):
+ self.assertEqual(lines(NEWLINES_TXT, comments='#'),
+ ['1', '2', '3'])
+
+class ExportTC(TestCase):
+ def setUp(self):
+ self.tempdir = tempfile.mktemp()
+ os.mkdir(self.tempdir)
+
+ def test(self):
+ export(DATA_DIR, self.tempdir, verbose=0)
+ self.assertTrue(exists(join(self.tempdir, '__init__.py')))
+ self.assertTrue(exists(join(self.tempdir, 'sub')))
+ self.assertTrue(not exists(join(self.tempdir, '__init__.pyc')))
+ self.assertTrue(not exists(join(self.tempdir, 'CVS')))
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir)
+
+class ProtectedFileTC(TestCase):
+ def setUp(self):
+ self.rpath = join(DATA_DIR, 'write_protected_file.txt')
+ self.rwpath = join(DATA_DIR, 'normal_file.txt')
+ # Make sure rpath is not writable !
+ os.chmod(self.rpath, 33060)
+ # Make sure rwpath is writable !
+ os.chmod(self.rwpath, 33188)
+
+ def test_mode_change(self):
+ """tests that mode is changed when needed"""
+ # test on non-writable file
+ #self.assertTrue(not os.access(self.rpath, os.W_OK))
+ self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
+ wp_file = ProtectedFile(self.rpath, 'w')
+ self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE)
+ self.assertTrue(os.access(self.rpath, os.W_OK))
+ # test on writable-file
+ self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
+ self.assertTrue(os.access(self.rwpath, os.W_OK))
+ wp_file = ProtectedFile(self.rwpath, 'w')
+ self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
+ self.assertTrue(os.access(self.rwpath, os.W_OK))
+
+ def test_restore_on_close(self):
+ """tests original mode is restored on close"""
+ # test on non-writable file
+ #self.assertTrue(not os.access(self.rpath, os.W_OK))
+ self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
+ ProtectedFile(self.rpath, 'w').close()
+ #self.assertTrue(not os.access(self.rpath, os.W_OK))
+ self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
+ # test on writable-file
+ self.assertTrue(os.access(self.rwpath, os.W_OK))
+ self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
+ ProtectedFile(self.rwpath, 'w').close()
+ self.assertTrue(os.access(self.rwpath, os.W_OK))
+ self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
+
+ def test_mode_change_on_append(self):
+ """tests that mode is changed when file is opened in 'a' mode"""
+ #self.assertTrue(not os.access(self.rpath, os.W_OK))
+ self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
+ wp_file = ProtectedFile(self.rpath, 'a')
+ self.assertTrue(os.access(self.rpath, os.W_OK))
+ self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE)
+ wp_file.close()
+ #self.assertTrue(not os.access(self.rpath, os.W_OK))
+ self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
+
+
+if sys.version_info < (3, 0):
+ def load_tests(loader, tests, ignore):
+ from logilab.common import fileutils
+ tests.addTests(doctest.DocTestSuite(fileutils))
+ return tests
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py b/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py
new file mode 100644
index 00000000..9a2e8bc9
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py
@@ -0,0 +1,89 @@
+# unit tests for the cache module
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.graph import get_cycles, has_path, ordered_nodes, UnorderableGraph
+
+class getCyclesTC(TestCase):
+
+ def test_known0(self):
+ self.assertEqual(get_cycles({1:[2], 2:[3], 3:[1]}), [[1, 2, 3]])
+
+ def test_known1(self):
+ self.assertEqual(get_cycles({1:[2], 2:[3], 3:[1, 4], 4:[3]}), [[1, 2, 3], [3, 4]])
+
+ def test_known2(self):
+ self.assertEqual(get_cycles({1:[2], 2:[3], 3:[0], 0:[]}), [])
+
+
+class hasPathTC(TestCase):
+
+ def test_direct_connection(self):
+ self.assertEqual(has_path({'A': ['B'], 'B': ['A']}, 'A', 'B'), ['B'])
+
+ def test_indirect_connection(self):
+ self.assertEqual(has_path({'A': ['B'], 'B': ['A', 'C'], 'C': ['B']}, 'A', 'C'), ['B', 'C'])
+
+ def test_no_connection(self):
+ self.assertEqual(has_path({'A': ['B'], 'B': ['A']}, 'A', 'C'), None)
+
+ def test_cycle(self):
+ self.assertEqual(has_path({'A': ['A']}, 'A', 'B'), None)
+
+class ordered_nodesTC(TestCase):
+
+ def test_one_item(self):
+ graph = {'a':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a',))
+
+ def test_single_dependency(self):
+ graph = {'a':['b'], 'b':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a','b'))
+ graph = {'a':[], 'b':['a']}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('b','a'))
+
+ def test_two_items_no_dependency(self):
+ graph = {'a':[], 'b':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a','b'))
+
+ def test_three_items_no_dependency(self):
+ graph = {'a':[], 'b':[], 'c':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a', 'b', 'c'))
+
+ def test_three_items_one_dependency(self):
+ graph = {'a': ['c'], 'b': [], 'c':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a', 'b', 'c'))
+
+ def test_three_items_two_dependencies(self):
+ graph = {'a': ['b'], 'b': ['c'], 'c':[]}
+ ordered = ordered_nodes(graph)
+ self.assertEqual(ordered, ('a', 'b', 'c'))
+
+ def test_bad_graph(self):
+ graph = {'a':['b']}
+ self.assertRaises(UnorderableGraph, ordered_nodes, graph)
+
+if __name__ == "__main__":
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py b/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py
new file mode 100644
index 00000000..1dbed7a1
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py
@@ -0,0 +1,87 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.interface import *
+
+class IFace1(Interface): pass
+class IFace2(Interface): pass
+class IFace3(Interface): pass
+
+
+class A(object):
+ __implements__ = (IFace1,)
+
+
+class B(A): pass
+
+
+class C1(B):
+ __implements__ = list(B.__implements__) + [IFace3]
+
+class C2(B):
+ __implements__ = B.__implements__ + (IFace2,)
+
+class D(C1):
+ __implements__ = ()
+
+class Z(object): pass
+
+class ExtendTC(TestCase):
+
+ def setUp(self):
+ global aimpl, c1impl, c2impl, dimpl
+ aimpl = A.__implements__
+ c1impl = C1.__implements__
+ c2impl = C2.__implements__
+ dimpl = D.__implements__
+
+ def test_base(self):
+ extend(A, IFace2)
+ self.assertEqual(A.__implements__, (IFace1, IFace2))
+ self.assertEqual(B.__implements__, (IFace1, IFace2))
+ self.assertTrue(B.__implements__ is A.__implements__)
+ self.assertEqual(C1.__implements__, [IFace1, IFace3, IFace2])
+ self.assertEqual(C2.__implements__, (IFace1, IFace2))
+ self.assertTrue(C2.__implements__ is c2impl)
+ self.assertEqual(D.__implements__, (IFace2,))
+
+ def test_already_impl(self):
+ extend(A, IFace1)
+ self.assertTrue(A.__implements__ is aimpl)
+
+ def test_no_impl(self):
+ extend(Z, IFace1)
+ self.assertEqual(Z.__implements__, (IFace1,))
+
+ def test_notimpl_explicit(self):
+ extend(C1, IFace3)
+ self.assertTrue(C1.__implements__ is c1impl)
+ self.assertTrue(D.__implements__ is dimpl)
+
+
+ def test_nonregr_implements_baseinterface(self):
+ class SubIFace(IFace1): pass
+ class X(object):
+ __implements__ = (SubIFace,)
+
+ self.assertTrue(SubIFace.is_implemented_by(X))
+ self.assertTrue(IFace1.is_implemented_by(X))
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py
new file mode 100644
index 00000000..ec2a5c82
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py
@@ -0,0 +1,296 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""
+unit tests for module modutils (module manipulation utilities)
+"""
+
+import doctest
+import sys
+import warnings
+try:
+ __file__
+except NameError:
+ __file__ = sys.argv[0]
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common import modutils
+
+from os import path, getcwd, sep
+from logilab import common
+from logilab.common import tree
+
+sys.path.insert(0, path.dirname(__file__))
+DATADIR = path.join(path.dirname(__file__), 'data')
+
+
+class ModutilsTestCase(TestCase):
+ def setUp(self):
+ super(ModutilsTestCase, self).setUp()
+ self.__common_in_path = common.__path__[0] in sys.path
+ if self.__common_in_path:
+ sys.path.remove(common.__path__[0])
+
+ def tearDown(self):
+ if self.__common_in_path:
+ sys.path.insert(0, common.__path__[0])
+ super(ModutilsTestCase, self).tearDown()
+
+
+class ModuleFileTC(ModutilsTestCase):
+ package = "mypypa"
+
+ def setUp(self):
+ super(ModuleFileTC, self).setUp()
+ for k in list(sys.path_importer_cache.keys()):
+ if 'MyPyPa' in k:
+ del sys.path_importer_cache[k]
+
+ def test_find_zipped_module(self):
+ mtype, mfile = modutils._module_file([self.package], [path.join(DATADIR, 'MyPyPa-0.1.0.zip')])
+ self.assertEqual(mtype, modutils.ZIPFILE)
+ self.assertEqual(mfile.split(sep)[-4:], ["test", "data", "MyPyPa-0.1.0.zip", self.package])
+
+ def test_find_egg_module(self):
+ mtype, mfile = modutils._module_file([self.package], [path.join(DATADIR, 'MyPyPa-0.1.0-py2.5.egg')])
+ self.assertEqual(mtype, modutils.ZIPFILE)
+ self.assertEqual(mfile.split(sep)[-4:], ["test", "data", "MyPyPa-0.1.0-py2.5.egg", self.package])
+
+
+class load_module_from_name_tc(ModutilsTestCase):
+    """ load a python module from its name """
+
+ def test_knownValues_load_module_from_name_1(self):
+ self.assertEqual(modutils.load_module_from_name('sys'), sys)
+
+ def test_knownValues_load_module_from_name_2(self):
+ self.assertEqual(modutils.load_module_from_name('os.path'), path)
+
+ def test_raise_load_module_from_name_1(self):
+ self.assertRaises(ImportError,
+ modutils.load_module_from_name, 'os.path', use_sys=0)
+
+
+class get_module_part_tc(ModutilsTestCase):
+ """given a dotted name return the module part of the name"""
+
+ def test_knownValues_get_module_part_1(self):
+ self.assertEqual(modutils.get_module_part('logilab.common.modutils'),
+ 'logilab.common.modutils')
+
+ def test_knownValues_get_module_part_2(self):
+ self.assertEqual(modutils.get_module_part('logilab.common.modutils.get_module_part'),
+ 'logilab.common.modutils')
+
+ def test_knownValues_get_module_part_3(self):
+ """relative import from given file"""
+ self.assertEqual(modutils.get_module_part('interface.Interface',
+ modutils.__file__), 'interface')
+
+ def test_knownValues_get_compiled_module_part(self):
+ self.assertEqual(modutils.get_module_part('math.log10'), 'math')
+ self.assertEqual(modutils.get_module_part('math.log10', __file__), 'math')
+
+ def test_knownValues_get_builtin_module_part(self):
+ self.assertEqual(modutils.get_module_part('sys.path'), 'sys')
+ self.assertEqual(modutils.get_module_part('sys.path', '__file__'), 'sys')
+
+ def test_get_module_part_exception(self):
+ self.assertRaises(ImportError, modutils.get_module_part, 'unknown.module',
+ modutils.__file__)
+
+
+class modpath_from_file_tc(ModutilsTestCase):
+ """ given an absolute file path return the python module's path as a list """
+
+ def test_knownValues_modpath_from_file_1(self):
+ with warnings.catch_warnings(record=True) as warns:
+ self.assertEqual(modutils.modpath_from_file(modutils.__file__),
+ ['logilab', 'common', 'modutils'])
+ self.assertIn('you should avoid using modpath_from_file()',
+ [str(w.message) for w in warns])
+
+ def test_knownValues_modpath_from_file_2(self):
+ self.assertEqual(modutils.modpath_from_file('unittest_modutils.py',
+ {getcwd(): 'arbitrary.pkg'}),
+ ['arbitrary', 'pkg', 'unittest_modutils'])
+
+ def test_raise_modpath_from_file_Exception(self):
+ self.assertRaises(Exception, modutils.modpath_from_file, '/turlututu')
+
+
+class load_module_from_path_tc(ModutilsTestCase):
+
+ def test_do_not_load_twice(self):
+ sys.path.insert(0, self.datadir)
+ foo = modutils.load_module_from_modpath(['lmfp', 'foo'])
+ lmfp = modutils.load_module_from_modpath(['lmfp'])
+ self.assertEqual(len(sys.just_once), 1)
+ sys.path.pop(0)
+ del sys.just_once
+
+class file_from_modpath_tc(ModutilsTestCase):
+    """given a mod path (i.e. split module / package name), return the
+ corresponding file, giving priority to source file over precompiled file
+ if it exists"""
+
+ def test_site_packages(self):
+ from pytz import tzinfo
+ self.assertEqual(path.realpath(modutils.file_from_modpath(['pytz', 'tzinfo'])),
+ path.realpath(tzinfo.__file__.replace('.pyc', '.py')))
+
+ def test_std_lib(self):
+ from os import path
+ self.assertEqual(path.realpath(modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py')),
+ path.realpath(path.__file__.replace('.pyc', '.py')))
+
+ def test_xmlplus(self):
+ try:
+ # don't fail if pyxml isn't installed
+ from xml.dom import ext
+ except ImportError:
+ pass
+ else:
+ self.assertEqual(path.realpath(modutils.file_from_modpath(['xml', 'dom', 'ext']).replace('.pyc', '.py')),
+ path.realpath(ext.__file__.replace('.pyc', '.py')))
+
+ def test_builtin(self):
+ self.assertEqual(modutils.file_from_modpath(['sys']),
+ None)
+
+
+ def test_unexisting(self):
+ self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu'])
+
+
+class get_source_file_tc(ModutilsTestCase):
+
+ def test(self):
+ from os import path
+ self.assertEqual(modutils.get_source_file(path.__file__),
+ path.__file__.replace('.pyc', '.py'))
+
+ def test_raise(self):
+ self.assertRaises(modutils.NoSourceFile, modutils.get_source_file, 'whatever')
+
+class is_standard_module_tc(ModutilsTestCase):
+ """
+ return true if the module may be considered as a module from the standard
+ library
+ """
+
+ def test_builtins(self):
+ if sys.version_info < (3, 0):
+ self.assertEqual(modutils.is_standard_module('__builtin__'), True)
+ self.assertEqual(modutils.is_standard_module('builtins'), False)
+ else:
+ self.assertEqual(modutils.is_standard_module('__builtin__'), False)
+ self.assertEqual(modutils.is_standard_module('builtins'), True)
+
+ def test_builtin(self):
+ self.assertEqual(modutils.is_standard_module('sys'), True)
+
+ def test_nonstandard(self):
+ self.assertEqual(modutils.is_standard_module('logilab'), False)
+
+ def test_unknown(self):
+ self.assertEqual(modutils.is_standard_module('unknown'), False)
+
+ def test_4(self):
+ self.assertEqual(modutils.is_standard_module('marshal'), True)
+ self.assertEqual(modutils.is_standard_module('pickle'), True)
+ self.assertEqual(modutils.is_standard_module('email'), True)
+ self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0))
+ venv_py3 = sys.version_info[0] >= 3 and hasattr(sys, 'real_prefix')
+ if not venv_py3:
+ # those modules are symlinked by virtualenv (but not by python's venv)
+ self.assertEqual(modutils.is_standard_module('hashlib'), True)
+ self.assertEqual(modutils.is_standard_module('io'), True)
+
+ def test_custom_path(self):
+ self.assertEqual(modutils.is_standard_module('data.module', (DATADIR,)), True)
+ self.assertEqual(modutils.is_standard_module('data.module', (path.abspath(DATADIR),)), True)
+
+ def test_failing_border_cases(self):
+ # using a subpackage/submodule path as std_path argument
+ self.assertEqual(modutils.is_standard_module('logilab.common', common.__path__), False)
+ # using a module + object name as modname argument
+ self.assertEqual(modutils.is_standard_module('sys.path'), True)
+ # this is because only the first package/module is considered
+ self.assertEqual(modutils.is_standard_module('sys.whatever'), True)
+ self.assertEqual(modutils.is_standard_module('logilab.whatever', common.__path__), False)
+
+
+class is_relative_tc(ModutilsTestCase):
+
+
+ def test_knownValues_is_relative_1(self):
+ self.assertEqual(modutils.is_relative('modutils', common.__path__[0]), True)
+
+ def test_knownValues_is_relative_2(self):
+ self.assertEqual(modutils.is_relative('modutils', tree.__file__), True)
+
+ def test_knownValues_is_relative_3(self):
+ self.assertEqual(modutils.is_relative('logilab.common.modutils',
+ common.__path__[0]), False)
+
+class get_modules_tc(ModutilsTestCase):
+
+ def test_knownValues_get_modules_1(self): # XXXFIXME: TOWRITE
+ """given a directory return a list of all available python modules, even
+ in subdirectories
+ """
+ import data.find_test as data
+ mod_path = ("data", 'find_test')
+ modules = sorted(modutils.get_modules(path.join(*mod_path),
+ data.__path__[0]))
+ self.assertSetEqual(set(modules),
+ set([ '.'.join(mod_path + (mod, )) for mod in ('module', 'module2',
+ 'noendingnewline', 'nonregr')]))
+
+
+class get_modules_files_tc(ModutilsTestCase):
+
+ def test_knownValues_get_module_files_1(self): # XXXFIXME: TOWRITE
+ """given a directory return a list of all available python module's files, even
+ in subdirectories
+ """
+ import data
+ modules = sorted(modutils.get_module_files(path.join(DATADIR, 'find_test'),
+ data.__path__[0]))
+ self.assertEqual(modules,
+ [path.join(DATADIR, 'find_test', x) for x in ['__init__.py', 'module.py', 'module2.py', 'noendingnewline.py', 'nonregr.py']])
+
+ def test_load_module_set_attribute(self):
+ import logilab.common.fileutils
+ import logilab
+ del logilab.common.fileutils
+ del sys.modules['logilab.common.fileutils']
+ m = modutils.load_module_from_modpath(['logilab', 'common', 'fileutils'])
+ self.assertTrue( hasattr(logilab, 'common') )
+ self.assertTrue( hasattr(logilab.common, 'fileutils') )
+ self.assertTrue( m is logilab.common.fileutils )
+
+
+def load_tests(loader, tests, ignore):
+ from logilab.common import modutils
+ tests.addTests(doctest.DocTestSuite(modutils))
+ return tests
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py b/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py
new file mode 100644
index 00000000..48e36ce5
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py
@@ -0,0 +1,86 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+from os.path import join
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.pytest import *
+
+class ModuleFunctionTC(TestCase):
+ def test_this_is_testdir(self):
+ self.assertTrue(this_is_a_testdir("test"))
+ self.assertTrue(this_is_a_testdir("tests"))
+ self.assertTrue(this_is_a_testdir("unittests"))
+ self.assertTrue(this_is_a_testdir("unittest"))
+ self.assertFalse(this_is_a_testdir("unit"))
+ self.assertFalse(this_is_a_testdir("units"))
+ self.assertFalse(this_is_a_testdir("undksjhqfl"))
+ self.assertFalse(this_is_a_testdir("this_is_not_a_dir_test"))
+ self.assertFalse(this_is_a_testdir("this_is_not_a_testdir"))
+ self.assertFalse(this_is_a_testdir("unittestsarenothere"))
+ self.assertTrue(this_is_a_testdir(join("coincoin", "unittests")))
+ self.assertFalse(this_is_a_testdir(join("unittests", "spongebob")))
+
+ def test_this_is_testfile(self):
+ self.assertTrue(this_is_a_testfile("test.py"))
+ self.assertTrue(this_is_a_testfile("testbabar.py"))
+ self.assertTrue(this_is_a_testfile("unittest_celestine.py"))
+ self.assertTrue(this_is_a_testfile("smoketest.py"))
+ self.assertFalse(this_is_a_testfile("test.pyc"))
+ self.assertFalse(this_is_a_testfile("zephir_test.py"))
+ self.assertFalse(this_is_a_testfile("smoketest.pl"))
+ self.assertFalse(this_is_a_testfile("unittest"))
+ self.assertTrue(this_is_a_testfile(join("coincoin", "unittest_bibi.py")))
+ self.assertFalse(this_is_a_testfile(join("unittest", "spongebob.py")))
+
+ def test_replace_trace(self):
+ def tracefn(frame, event, arg):
+ pass
+
+ oldtrace = sys.gettrace()
+ with replace_trace(tracefn):
+ self.assertIs(sys.gettrace(), tracefn)
+
+ self.assertIs(sys.gettrace(), oldtrace)
+
+ def test_pause_trace(self):
+ def tracefn(frame, event, arg):
+ pass
+
+ oldtrace = sys.gettrace()
+ sys.settrace(tracefn)
+ try:
+ self.assertIs(sys.gettrace(), tracefn)
+ with pause_trace():
+ self.assertIs(sys.gettrace(), None)
+ self.assertIs(sys.gettrace(), tracefn)
+ finally:
+ sys.settrace(oldtrace)
+
+ def test_nocoverage(self):
+ def tracefn(frame, event, arg):
+ pass
+
+ @nocoverage
+ def myfn():
+ self.assertIs(sys.gettrace(), None)
+
+ with replace_trace(tracefn):
+ myfn()
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py b/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py
new file mode 100644
index 00000000..1c07e4ce
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py
@@ -0,0 +1,220 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of Logilab-Common.
+#
+# Logilab-Common is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# Logilab-Common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with Logilab-Common. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for selectors mechanism"""
+
+import gc
+import logging
+import os.path as osp
+import sys
+from operator import eq, lt, le, gt
+from contextlib import contextmanager
+import warnings
+
+logging.basicConfig(level=logging.ERROR)
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from logilab.common.registry import *
+
+
+class _1_(Predicate):
+ def __call__(self, *args, **kwargs):
+ return 1
+
+class _0_(Predicate):
+ def __call__(self, *args, **kwargs):
+ return 0
+
+def _2_(*args, **kwargs):
+ return 2
+
+
+class SelectorsTC(TestCase):
+ def test_basic_and(self):
+ selector = _1_() & _1_()
+ self.assertEqual(selector(None), 2)
+ selector = _1_() & _0_()
+ self.assertEqual(selector(None), 0)
+ selector = _0_() & _1_()
+ self.assertEqual(selector(None), 0)
+
+ def test_basic_or(self):
+ selector = _1_() | _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _1_() | _0_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_() | _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_() | _0_()
+ self.assertEqual(selector(None), 0)
+
+ def test_selector_and_function(self):
+ selector = _1_() & _2_
+ self.assertEqual(selector(None), 3)
+ selector = _2_ & _1_()
+ self.assertEqual(selector(None), 3)
+
+ def test_three_and(self):
+ selector = _1_() & _1_() & _1_()
+ self.assertEqual(selector(None), 3)
+ selector = _1_() & _0_() & _1_()
+ self.assertEqual(selector(None), 0)
+ selector = _0_() & _1_() & _1_()
+ self.assertEqual(selector(None), 0)
+
+ def test_three_or(self):
+ selector = _1_() | _1_() | _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _1_() | _0_() | _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_() | _1_() | _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_() | _0_() | _0_()
+ self.assertEqual(selector(None), 0)
+
+ def test_composition(self):
+ selector = (_1_() & _1_()) & (_1_() & _1_())
+ self.assertTrue(isinstance(selector, AndPredicate))
+ self.assertEqual(len(selector.selectors), 4)
+ self.assertEqual(selector(None), 4)
+ selector = (_1_() & _0_()) | (_1_() & _1_())
+ self.assertTrue(isinstance(selector, OrPredicate))
+ self.assertEqual(len(selector.selectors), 2)
+ self.assertEqual(selector(None), 2)
+
+ def test_search_selectors(self):
+ sel = _1_()
+ self.assertIs(sel.search_selector(_1_), sel)
+ csel = AndPredicate(sel, Predicate())
+ self.assertIs(csel.search_selector(_1_), sel)
+ csel = AndPredicate(Predicate(), sel)
+ self.assertIs(csel.search_selector(_1_), sel)
+ self.assertIs(csel.search_selector((AndPredicate, OrPredicate)), csel)
+ self.assertIs(csel.search_selector((OrPredicate, AndPredicate)), csel)
+ self.assertIs(csel.search_selector((_1_, _0_)), sel)
+ self.assertIs(csel.search_selector((_0_, _1_)), sel)
+
+ def test_inplace_and(self):
+ selector = _1_()
+ selector &= _1_()
+ selector &= _1_()
+ self.assertEqual(selector(None), 3)
+ selector = _1_()
+ selector &= _0_()
+ selector &= _1_()
+ self.assertEqual(selector(None), 0)
+ selector = _0_()
+ selector &= _1_()
+ selector &= _1_()
+ self.assertEqual(selector(None), 0)
+ selector = _0_()
+ selector &= _0_()
+ selector &= _0_()
+ self.assertEqual(selector(None), 0)
+
+ def test_inplace_or(self):
+ selector = _1_()
+ selector |= _1_()
+ selector |= _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _1_()
+ selector |= _0_()
+ selector |= _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_()
+ selector |= _1_()
+ selector |= _1_()
+ self.assertEqual(selector(None), 1)
+ selector = _0_()
+ selector |= _0_()
+ selector |= _0_()
+ self.assertEqual(selector(None), 0)
+
+ def test_wrap_selectors(self):
+ class _temp_(Predicate):
+ def __call__(self, *args, **kwargs):
+ return 0
+ del _temp_ # test weakref
+ s1 = _1_() & _1_()
+ s2 = _1_() & _0_()
+ s3 = _0_() & _1_()
+ gc.collect()
+ self.count = 0
+ def decorate(f, self=self):
+ def wrapper(*args, **kwargs):
+ self.count += 1
+ return f(*args, **kwargs)
+ return wrapper
+ wrap_predicates(decorate)
+ self.assertEqual(s1(None), 2)
+ self.assertEqual(s2(None), 0)
+ self.assertEqual(s3(None), 0)
+ self.assertEqual(self.count, 8)
+
+@contextmanager
+def prepended_syspath(path):
+ sys.path.insert(0, path)
+ yield
+ sys.path = sys.path[1:]
+
+class RegistryStoreTC(TestCase):
+
+ def test_autoload(self):
+ store = RegistryStore()
+ store.setdefault('zereg')
+ with prepended_syspath(self.datadir):
+ with warnings.catch_warnings(record=True) as warns:
+ store.register_objects([self.datapath('regobjects.py'),
+ self.datapath('regobjects2.py')])
+ self.assertIn('use register_modnames() instead',
+ [str(w.message) for w in warns])
+ self.assertEqual(['zereg'], list(store.keys()))
+ self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')),
+ set(store['zereg']))
+
+ def test_autoload_modnames(self):
+ store = RegistryStore()
+ store.setdefault('zereg')
+ with prepended_syspath(self.datadir):
+ store.register_modnames(['regobjects', 'regobjects2'])
+ self.assertEqual(['zereg'], list(store.keys()))
+ self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')),
+ set(store['zereg']))
+
+
+class RegistrableInstanceTC(TestCase):
+
+ def test_instance_modulename(self):
+ with warnings.catch_warnings(record=True) as warns:
+ obj = RegistrableInstance()
+ self.assertEqual(obj.__module__, 'unittest_registry')
+ self.assertIn('instantiate RegistrableInstance with __module__=__name__',
+ [str(w.message) for w in warns])
+ # no inheritance
+ obj = RegistrableInstance(__module__=__name__)
+ self.assertEqual(obj.__module__, 'unittest_registry')
+ # with inheritance from another python file
+ with prepended_syspath(self.datadir):
+ from regobjects2 import instance, MyRegistrableInstance
+ instance2 = MyRegistrableInstance(__module__=__name__)
+ self.assertEqual(instance.__module__, 'regobjects2')
+ self.assertEqual(instance2.__module__, 'unittest_registry')
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py
new file mode 100644
index 00000000..9342ae9b
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py
@@ -0,0 +1,235 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""unit tests for logilab.common.shellutils"""
+
+import sys, os, tempfile, shutil
+from os.path import join, dirname, abspath
+import datetime, time
+
+from six.moves import range
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from logilab.common.shellutils import (globfind, find, ProgressBar,
+ RawInput)
+from logilab.common.compat import StringIO
+
+
+DATA_DIR = join(dirname(abspath(__file__)), 'data', 'find_test')
+
+
+class FindTC(TestCase):
+ def test_include(self):
+ files = set(find(DATA_DIR, '.py'))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py',
+ 'module2.py', 'noendingnewline.py',
+ 'nonregr.py', join('sub', 'momo.py')]]))
+ files = set(find(DATA_DIR, ('.py',), blacklist=('sub',)))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py',
+ 'module2.py', 'noendingnewline.py',
+ 'nonregr.py']]))
+
+ def test_exclude(self):
+ files = set(find(DATA_DIR, ('.py', '.pyc'), exclude=True))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['foo.txt',
+ 'newlines.txt',
+ 'normal_file.txt',
+ 'test.ini',
+ 'test1.msg',
+ 'test2.msg',
+ 'spam.txt',
+ join('sub', 'doc.txt'),
+ 'write_protected_file.txt',
+ ]]))
+
+ def test_globfind(self):
+ files = set(globfind(DATA_DIR, '*.py'))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py',
+ 'module2.py', 'noendingnewline.py',
+ 'nonregr.py', join('sub', 'momo.py')]]))
+ files = set(globfind(DATA_DIR, 'mo*.py'))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['module.py', 'module2.py',
+ join('sub', 'momo.py')]]))
+ files = set(globfind(DATA_DIR, 'mo*.py', blacklist=('sub',)))
+ self.assertSetEqual(files,
+ set([join(DATA_DIR, f) for f in ['module.py', 'module2.py']]))
+
+
+class ProgressBarTC(TestCase):
+ def test_refresh(self):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ pgb = ProgressBar(20, stream=pgb_stream)
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) # nothing print before refresh
+ pgb.refresh()
+ expected_stream.write("\r["+' '*20+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+ def test_refresh_g_size(self):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ pgb = ProgressBar(20, 35, stream=pgb_stream)
+ pgb.refresh()
+ expected_stream.write("\r["+' '*35+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+ def test_refresh_l_size(self):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ pgb = ProgressBar(20, 3, stream=pgb_stream)
+ pgb.refresh()
+ expected_stream.write("\r["+' '*3+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+ def _update_test(self, nbops, expected, size = None):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ if size is None:
+ pgb = ProgressBar(nbops, stream=pgb_stream)
+ size=20
+ else:
+ pgb = ProgressBar(nbops, size, stream=pgb_stream)
+ last = 0
+ for round in expected:
+ if not hasattr(round, '__int__'):
+ dots, update = round
+ else:
+ dots, update = round, None
+ pgb.update()
+ if update or (update is None and dots != last):
+ last = dots
+ expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+ def test_default(self):
+ self._update_test(20, range(1, 21))
+
+ def test_nbops_gt_size(self):
+ """Test the progress bar for nbops > size"""
+ def half(total):
+ for counter in range(1, total+1):
+ yield counter // 2
+ self._update_test(40, half(40))
+
+ def test_nbops_lt_size(self):
+ """Test the progress bar for nbops < size"""
+ def double(total):
+ for counter in range(1, total+1):
+ yield counter * 2
+ self._update_test(10, double(10))
+
+ def test_nbops_nomul_size(self):
+ """Test the progress bar for size % nbops !=0 (non int number of dots per update)"""
+ self._update_test(3, (6, 13, 20))
+
+ def test_overflow(self):
+ self._update_test(5, (8, 16, 25, 33, 42, (42, True)), size=42)
+
+ def test_update_exact(self):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ size=20
+ pgb = ProgressBar(100, size, stream=pgb_stream)
+ last = 0
+ for dots in range(10, 105, 15):
+ pgb.update(dots, exact=True)
+ dots //= 5
+ expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+ def test_update_relative(self):
+ pgb_stream = StringIO()
+ expected_stream = StringIO()
+ size=20
+ pgb = ProgressBar(100, size, stream=pgb_stream)
+ last = 0
+ for dots in range(5, 105, 5):
+ pgb.update(5, exact=False)
+ dots //= 5
+ expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]")
+ self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue())
+
+
+class RawInputTC(TestCase):
+
+ def auto_input(self, *args):
+ self.input_args = args
+ return self.input_answer
+
+ def setUp(self):
+ null_printer = lambda x: None
+ self.qa = RawInput(self.auto_input, null_printer)
+
+ def test_ask_default(self):
+ self.input_answer = ''
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'yes')
+ self.input_answer = ' '
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'yes')
+
+ def test_ask_case(self):
+ self.input_answer = 'no'
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'no')
+ self.input_answer = 'No'
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'no')
+ self.input_answer = 'NO'
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'no')
+ self.input_answer = 'nO'
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'no')
+ self.input_answer = 'YES'
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(answer, 'yes')
+
+ def test_ask_prompt(self):
+ self.input_answer = ''
+ answer = self.qa.ask('text', ('yes', 'no'), 'yes')
+ self.assertEqual(self.input_args[0], 'text [Y(es)/n(o)]: ')
+ answer = self.qa.ask('text', ('y', 'n'), 'y')
+ self.assertEqual(self.input_args[0], 'text [Y/n]: ')
+ answer = self.qa.ask('text', ('n', 'y'), 'y')
+ self.assertEqual(self.input_args[0], 'text [n/Y]: ')
+ answer = self.qa.ask('text', ('yes', 'no', 'maybe', '1'), 'yes')
+ self.assertEqual(self.input_args[0], 'text [Y(es)/n(o)/m(aybe)/1]: ')
+
+ def test_ask_ambiguous(self):
+ self.input_answer = 'y'
+ self.assertRaises(Exception, self.qa.ask, 'text', ('yes', 'yep'), 'yes')
+
+ def test_confirm(self):
+ self.input_answer = 'y'
+ self.assertEqual(self.qa.confirm('Say yes'), True)
+ self.assertEqual(self.qa.confirm('Say yes', default_is_yes=False), True)
+ self.input_answer = 'n'
+ self.assertEqual(self.qa.confirm('Say yes'), False)
+ self.assertEqual(self.qa.confirm('Say yes', default_is_yes=False), False)
+ self.input_answer = ''
+ self.assertEqual(self.qa.confirm('Say default'), True)
+ self.assertEqual(self.qa.confirm('Say default', default_is_yes=False), False)
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_table.py b/pymode/libs/logilab-common-1.4.1/test/unittest_table.py
new file mode 100644
index 00000000..320b6938
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_table.py
@@ -0,0 +1,448 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""
+Unittests for table management
+"""
+
+
+import sys
+import os
+
+from six.moves import range
+
+from logilab.common.compat import StringIO
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.table import Table, TableStyleSheet, DocbookTableWriter, \
+ DocbookRenderer, TableStyle, TableWriter, TableCellRenderer
+
+class TableTC(TestCase):
+ """Table TestCase class"""
+
+ def setUp(self):
+ """Creates a default table"""
+ # from logilab.common import table
+ # reload(table)
+ self.table = Table()
+ self.table.create_rows(['row1', 'row2', 'row3'])
+ self.table.create_columns(['col1', 'col2'])
+
+ def test_valeur_scalaire(self):
+ tab = Table()
+ tab.create_columns(['col1'])
+ tab.append_row([1])
+ self.assertEqual(tab, [[1]])
+ tab.append_row([2])
+ self.assertEqual(tab[0, 0], 1)
+ self.assertEqual(tab[1, 0], 2)
+
+ def test_valeur_ligne(self):
+ tab = Table()
+ tab.create_columns(['col1', 'col2'])
+ tab.append_row([1, 2])
+ self.assertEqual(tab, [[1, 2]])
+
+ def test_valeur_colonne(self):
+ tab = Table()
+ tab.create_columns(['col1'])
+ tab.append_row([1])
+ tab.append_row([2])
+ self.assertEqual(tab, [[1], [2]])
+ self.assertEqual(tab[:, 0], [1, 2])
+
+ def test_indexation(self):
+ """we should be able to use [] to access rows"""
+ self.assertEqual(self.table[0], self.table.data[0])
+ self.assertEqual(self.table[1], self.table.data[1])
+
+ def test_iterable(self):
+ """test iter(table)"""
+ it = iter(self.table)
+ self.assertEqual(next(it), self.table.data[0])
+ self.assertEqual(next(it), self.table.data[1])
+
+ def test_get_rows(self):
+ """tests Table.get_rows()"""
+ self.assertEqual(self.table, [[0, 0], [0, 0], [0, 0]])
+ self.assertEqual(self.table[:], [[0, 0], [0, 0], [0, 0]])
+ self.table.insert_column(1, range(3), 'supp')
+ self.assertEqual(self.table, [[0, 0, 0], [0, 1, 0], [0, 2, 0]])
+ self.assertEqual(self.table[:], [[0, 0, 0], [0, 1, 0], [0, 2, 0]])
+
+ def test_get_cells(self):
+ self.table.insert_column(1, range(3), 'supp')
+ self.assertEqual(self.table[0, 1], 0)
+ self.assertEqual(self.table[1, 1], 1)
+ self.assertEqual(self.table[2, 1], 2)
+ self.assertEqual(self.table['row1', 'supp'], 0)
+ self.assertEqual(self.table['row2', 'supp'], 1)
+ self.assertEqual(self.table['row3', 'supp'], 2)
+ self.assertRaises(KeyError, self.table.__getitem__, ('row1', 'foo'))
+ self.assertRaises(KeyError, self.table.__getitem__, ('foo', 'bar'))
+
+ def test_shape(self):
+ """tests table shape"""
+ self.assertEqual(self.table.shape, (3, 2))
+ self.table.insert_column(1, range(3), 'supp')
+ self.assertEqual(self.table.shape, (3, 3))
+
+ def test_set_column(self):
+ """Tests that table.set_column() works fine.
+ """
+ self.table.set_column(0, range(3))
+ self.assertEqual(self.table[0, 0], 0)
+ self.assertEqual(self.table[1, 0], 1)
+ self.assertEqual(self.table[2, 0], 2)
+
+ def test_set_column_by_id(self):
+ """Tests that table.set_column_by_id() works fine.
+ """
+ self.table.set_column_by_id('col1', range(3))
+ self.assertEqual(self.table[0, 0], 0)
+ self.assertEqual(self.table[1, 0], 1)
+ self.assertEqual(self.table[2, 0], 2)
+ self.assertRaises(KeyError, self.table.set_column_by_id, 'col123', range(3))
+
+ def test_cells_ids(self):
+ """tests that we can access cells by giving row/col ids"""
+ self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row12', 'col1', 12)
+ self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row1', 'col12', 12)
+ self.assertEqual(self.table[0, 0], 0)
+ self.table.set_cell_by_ids('row1', 'col1', 'DATA')
+ self.assertEqual(self.table[0, 0], 'DATA')
+ self.assertRaises(KeyError, self.table.set_row_by_id, 'row12', [])
+ self.table.set_row_by_id('row1', ['1.0', '1.1'])
+ self.assertEqual(self.table[0, 0], '1.0')
+
+ def test_insert_row(self):
+ """tests a row insertion"""
+ tmp_data = ['tmp1', 'tmp2']
+ self.table.insert_row(1, tmp_data, 'tmprow')
+ self.assertEqual(self.table[1], tmp_data)
+ self.assertEqual(self.table['tmprow'], tmp_data)
+ self.table.delete_row_by_id('tmprow')
+ self.assertRaises(KeyError, self.table.delete_row_by_id, 'tmprow')
+ self.assertEqual(self.table[1], [0, 0])
+ self.assertRaises(KeyError, self.table.__getitem__, 'tmprow')
+
+ def test_get_column(self):
+ """Tests that table.get_column() works fine.
+ """
+ self.table.set_cell(0, 1, 12)
+ self.table.set_cell(2, 1, 13)
+ self.assertEqual(self.table[:, 1], [12, 0, 13])
+ self.assertEqual(self.table[:, 'col2'], [12, 0, 13])
+
+ def test_get_columns(self):
+ """Tests if table.get_columns() works fine.
+ """
+ self.table.set_cell(0, 1, 12)
+ self.table.set_cell(2, 1, 13)
+ self.assertEqual(self.table.get_columns(), [[0, 0, 0], [12, 0, 13]])
+
+ def test_insert_column(self):
+ """Tests that table.insert_column() works fine.
+ """
+ self.table.insert_column(1, range(3), "inserted_column")
+ self.assertEqual(self.table[:, 1], [0, 1, 2])
+ self.assertEqual(self.table.col_names,
+ ['col1', 'inserted_column', 'col2'])
+
+ def test_delete_column(self):
+ """Tests that table.delete_column() works fine.
+ """
+ self.table.delete_column(1)
+ self.assertEqual(self.table.col_names, ['col1'])
+ self.assertEqual(self.table[:, 0], [0, 0, 0])
+ self.assertRaises(KeyError, self.table.delete_column_by_id, 'col2')
+ self.table.delete_column_by_id('col1')
+ self.assertEqual(self.table.col_names, [])
+
+ def test_transpose(self):
+ """Tests that table.transpose() works fine.
+ """
+ self.table.append_column(range(5, 8), 'col3')
+ ttable = self.table.transpose()
+ self.assertEqual(ttable.row_names, ['col1', 'col2', 'col3'])
+ self.assertEqual(ttable.col_names, ['row1', 'row2', 'row3'])
+ self.assertEqual(ttable.data, [[0, 0, 0], [0, 0, 0], [5, 6, 7]])
+
+ def test_sort_table(self):
+ """Tests the table sort by column
+ """
+ self.table.set_column(0, [3, 1, 2])
+ self.table.set_column(1, [1, 2, 3])
+ self.table.sort_by_column_index(0)
+ self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1'])
+ self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]])
+ self.table.sort_by_column_index(1, 'desc')
+ self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1'])
+ self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]])
+
+ def test_sort_by_id(self):
+ """tests sort_by_column_id()"""
+ self.table.set_column_by_id('col1', [3, 1, 2])
+ self.table.set_column_by_id('col2', [1, 2, 3])
+ self.table.sort_by_column_id('col1')
+ self.assertRaises(KeyError, self.table.sort_by_column_id, 'col123')
+ self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1'])
+ self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]])
+ self.table.sort_by_column_id('col2', 'desc')
+ self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1'])
+ self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]])
+
+ def test_pprint(self):
+ """only tests pprint doesn't raise an exception"""
+ self.table.pprint()
+ str(self.table)
+
+
+class GroupByTC(TestCase):
+ """specific test suite for groupby()"""
+ def setUp(self):
+ t = Table()
+ t.create_columns(['date', 'res', 'task', 'usage'])
+ t.append_row(['date1', 'ing1', 'task1', 0.3])
+ t.append_row(['date1', 'ing2', 'task2', 0.3])
+ t.append_row(['date2', 'ing3', 'task3', 0.3])
+ t.append_row(['date3', 'ing4', 'task2', 0.3])
+ t.append_row(['date1', 'ing1', 'task3', 0.3])
+ t.append_row(['date3', 'ing1', 'task3', 0.3])
+ self.table = t
+
+ def test_single_groupby(self):
+ """tests groupby() on several columns"""
+ grouped = self.table.groupby('date')
+ self.assertEqual(len(grouped), 3)
+ self.assertEqual(len(grouped['date1']), 3)
+ self.assertEqual(len(grouped['date2']), 1)
+ self.assertEqual(len(grouped['date3']), 2)
+ self.assertEqual(grouped['date1'], [
+ ('date1', 'ing1', 'task1', 0.3),
+ ('date1', 'ing2', 'task2', 0.3),
+ ('date1', 'ing1', 'task3', 0.3),
+ ])
+ self.assertEqual(grouped['date2'], [('date2', 'ing3', 'task3', 0.3)])
+ self.assertEqual(grouped['date3'], [
+ ('date3', 'ing4', 'task2', 0.3),
+ ('date3', 'ing1', 'task3', 0.3),
+ ])
+
+ def test_multiple_groupby(self):
+ """tests groupby() on several columns"""
+ grouped = self.table.groupby('date', 'task')
+ self.assertEqual(len(grouped), 3)
+ self.assertEqual(len(grouped['date1']), 3)
+ self.assertEqual(len(grouped['date2']), 1)
+ self.assertEqual(len(grouped['date3']), 2)
+ self.assertEqual(grouped['date1']['task1'], [('date1', 'ing1', 'task1', 0.3)])
+ self.assertEqual(grouped['date2']['task3'], [('date2', 'ing3', 'task3', 0.3)])
+ self.assertEqual(grouped['date3']['task2'], [('date3', 'ing4', 'task2', 0.3)])
+ date3 = grouped['date3']
+ self.assertRaises(KeyError, date3.__getitem__, 'task1')
+
+
+ def test_select(self):
+ """tests Table.select() method"""
+ rows = self.table.select('date', 'date1')
+ self.assertEqual(rows, [
+ ('date1', 'ing1', 'task1', 0.3),
+ ('date1', 'ing2', 'task2', 0.3),
+ ('date1', 'ing1', 'task3', 0.3),
+ ])
+
+class TableStyleSheetTC(TestCase):
+ """The Stylesheet test case
+ """
+ def setUp(self):
+ """Builds a simple table to test the stylesheet
+ """
+ self.table = Table()
+ self.table.create_row('row1')
+ self.table.create_columns(['a', 'b', 'c'])
+ self.stylesheet = TableStyleSheet()
+ # We don't want anything to be printed
+ self.stdout_backup = sys.stdout
+ sys.stdout = StringIO()
+
+ def tearDown(self):
+ sys.stdout = self.stdout_backup
+
+ def test_add_rule(self):
+ """Tests that the regex pattern works as expected.
+ """
+ rule = '0_2 = sqrt(0_0**2 + 0_1**2)'
+ self.stylesheet.add_rule(rule)
+ self.table.set_row(0, [3, 4, 0])
+ self.table.apply_stylesheet(self.stylesheet)
+ self.assertEqual(self.table[0], [3, 4, 5])
+ self.assertEqual(len(self.stylesheet.rules), 1)
+ self.stylesheet.add_rule('some bad rule with bad syntax')
+ self.assertEqual(len(self.stylesheet.rules), 1, "Ill-formed rule mustn't be added")
+ self.assertEqual(len(self.stylesheet.instructions), 1, "Ill-formed rule mustn't be added")
+
+ def test_stylesheet_init(self):
+ """tests Stylesheet.__init__"""
+ rule = '0_2 = 1'
+ sheet = TableStyleSheet([rule, 'bad rule'])
+ self.assertEqual(len(sheet.rules), 1, "Ill-formed rule mustn't be added")
+ self.assertEqual(len(sheet.instructions), 1, "Ill-formed rule mustn't be added")
+
+ def test_rowavg_rule(self):
+ """Tests that add_rowavg_rule works as expected
+ """
+ self.table.set_row(0, [10, 20, 0])
+ self.stylesheet.add_rowavg_rule((0, 2), 0, 0, 1)
+ self.table.apply_stylesheet(self.stylesheet)
+ val = self.table[0, 2]
+ self.assertEqual(int(val), 15)
+
+
+ def test_rowsum_rule(self):
+ """Tests that add_rowsum_rule works as expected
+ """
+ self.table.set_row(0, [10, 20, 0])
+ self.stylesheet.add_rowsum_rule((0, 2), 0, 0, 1)
+ self.table.apply_stylesheet(self.stylesheet)
+ val = self.table[0, 2]
+ self.assertEqual(val, 30)
+
+
+ def test_colavg_rule(self):
+ """Tests that add_colavg_rule works as expected
+ """
+ self.table.set_row(0, [10, 20, 0])
+ self.table.append_row([12, 8, 3], 'row2')
+ self.table.create_row('row3')
+ self.stylesheet.add_colavg_rule((2, 0), 0, 0, 1)
+ self.table.apply_stylesheet(self.stylesheet)
+ val = self.table[2, 0]
+ self.assertEqual(int(val), 11)
+
+
+ def test_colsum_rule(self):
+ """Tests that add_colsum_rule works as expected
+ """
+ self.table.set_row(0, [10, 20, 0])
+ self.table.append_row([12, 8, 3], 'row2')
+ self.table.create_row('row3')
+ self.stylesheet.add_colsum_rule((2, 0), 0, 0, 1)
+ self.table.apply_stylesheet(self.stylesheet)
+ val = self.table[2, 0]
+ self.assertEqual(val, 22)
+
+
+
+class TableStyleTC(TestCase):
+ """Test suite for TableSuite"""
+ def setUp(self):
+ self.table = Table()
+ self.table.create_rows(['row1', 'row2', 'row3'])
+ self.table.create_columns(['col1', 'col2'])
+ self.style = TableStyle(self.table)
+ self._tested_attrs = (('size', '1*'),
+ ('alignment', 'right'),
+ ('unit', ''))
+
+ def test_getset(self):
+ """tests style's get and set methods"""
+ for attrname, default_value in self._tested_attrs:
+ getter = getattr(self.style, 'get_%s' % attrname)
+ setter = getattr(self.style, 'set_%s' % attrname)
+ self.assertRaises(KeyError, getter, 'badcol')
+ self.assertEqual(getter('col1'), default_value)
+ setter('FOO', 'col1')
+ self.assertEqual(getter('col1'), 'FOO')
+
+ def test_getset_index(self):
+ """tests style's get and set by index methods"""
+ for attrname, default_value in self._tested_attrs:
+ getter = getattr(self.style, 'get_%s' % attrname)
+ setter = getattr(self.style, 'set_%s' % attrname)
+ igetter = getattr(self.style, 'get_%s_by_index' % attrname)
+ isetter = getattr(self.style, 'set_%s_by_index' % attrname)
+ self.assertEqual(getter('__row_column__'), default_value)
+ isetter('FOO', 0)
+ self.assertEqual(getter('__row_column__'), 'FOO')
+ self.assertEqual(igetter(0), 'FOO')
+ self.assertEqual(getter('col1'), default_value)
+ isetter('FOO', 1)
+ self.assertEqual(getter('col1'), 'FOO')
+ self.assertEqual(igetter(1), 'FOO')
+
+
+class RendererTC(TestCase):
+ """Test suite for DocbookRenderer"""
+ def setUp(self):
+ self.renderer = DocbookRenderer(alignment = True)
+ self.table = Table()
+ self.table.create_rows(['row1', 'row2', 'row3'])
+ self.table.create_columns(['col1', 'col2'])
+ self.style = TableStyle(self.table)
+ self.base_renderer = TableCellRenderer()
+
+ def test_cell_content(self):
+ """test how alignment is rendered"""
+ entry_xml = self.renderer._render_cell_content('data', self.style, 1)
+ self.assertEqual(entry_xml, "<entry align='right'>data</entry>\n")
+ self.style.set_alignment_by_index('left', 1)
+ entry_xml = self.renderer._render_cell_content('data', self.style, 1)
+ self.assertEqual(entry_xml, "<entry align='left'>data</entry>\n")
+
+ def test_default_content_rendering(self):
+ """tests that default rendering just prints the cell's content"""
+ rendered_cell = self.base_renderer._render_cell_content('data', self.style, 1)
+ self.assertEqual(rendered_cell, "data")
+
+ def test_replacement_char(self):
+ """tests that 0 is replaced when asked for"""
+ cell_content = self.base_renderer._make_cell_content(0, self.style, 1)
+ self.assertEqual(cell_content, 0)
+ self.base_renderer.properties['skip_zero'] = '---'
+ cell_content = self.base_renderer._make_cell_content(0, self.style, 1)
+ self.assertEqual(cell_content, '---')
+
+ def test_unit(self):
+ """tests if units are added"""
+ self.base_renderer.properties['units'] = True
+ self.style.set_unit_by_index('EUR', 1)
+ cell_content = self.base_renderer._make_cell_content(12, self.style, 1)
+ self.assertEqual(cell_content, '12 EUR')
+
+
+class DocbookTableWriterTC(TestCase):
+ """TestCase for table's writer"""
+ def setUp(self):
+ self.stream = StringIO()
+ self.table = Table()
+ self.table.create_rows(['row1', 'row2', 'row3'])
+ self.table.create_columns(['col1', 'col2'])
+ self.writer = DocbookTableWriter(self.stream, self.table, None)
+ self.writer.set_renderer(DocbookRenderer())
+
+ def test_write_table(self):
+ """make sure write_table() doesn't raise any exception"""
+ self.writer.write_table()
+
+ def test_abstract_writer(self):
+ """tests that Abstract Writers can't be used !"""
+ writer = TableWriter(self.stream, self.table, None)
+ self.assertRaises(NotImplementedError, writer.write_table)
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py b/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py
new file mode 100644
index 00000000..d8b6a9e7
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py
@@ -0,0 +1,71 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+from logilab.common.testlib import TestCase, unittest_main
+
+from logilab.common.tasksqueue import *
+
+class TaskTC(TestCase):
+
+ def test_eq(self):
+ self.assertFalse(Task('t1') == Task('t2'))
+ self.assertTrue(Task('t1') == Task('t1'))
+
+ def test_cmp(self):
+ self.assertTrue(Task('t1', LOW) < Task('t2', MEDIUM))
+ self.assertFalse(Task('t1', LOW) > Task('t2', MEDIUM))
+ self.assertTrue(Task('t1', HIGH) > Task('t2', MEDIUM))
+ self.assertFalse(Task('t1', HIGH) < Task('t2', MEDIUM))
+
+
+class PrioritizedTasksQueueTC(TestCase):
+
+ def test_priority(self):
+ queue = PrioritizedTasksQueue()
+ queue.put(Task('t1'))
+ queue.put(Task('t2', MEDIUM))
+ queue.put(Task('t3', HIGH))
+ queue.put(Task('t4', LOW))
+ self.assertEqual(queue.get().id, 't3')
+ self.assertEqual(queue.get().id, 't2')
+ self.assertEqual(queue.get().id, 't1')
+ self.assertEqual(queue.get().id, 't4')
+
+ def test_remove_equivalent(self):
+ queue = PrioritizedTasksQueue()
+ queue.put(Task('t1'))
+ queue.put(Task('t2', MEDIUM))
+ queue.put(Task('t1', HIGH))
+ queue.put(Task('t3', MEDIUM))
+ queue.put(Task('t2', MEDIUM))
+ self.assertEqual(queue.qsize(), 3)
+ self.assertEqual(queue.get().id, 't1')
+ self.assertEqual(queue.get().id, 't2')
+ self.assertEqual(queue.get().id, 't3')
+ self.assertEqual(queue.qsize(), 0)
+
+ def test_remove(self):
+ queue = PrioritizedTasksQueue()
+ queue.put(Task('t1'))
+ queue.put(Task('t2'))
+ queue.put(Task('t3'))
+ queue.remove('t2')
+ self.assertEqual([t.id for t in queue], ['t3', 't1'])
+ self.assertRaises(ValueError, queue.remove, 't4')
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py b/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py
new file mode 100644
index 00000000..fe2e31a8
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py
@@ -0,0 +1,790 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""unittest module for logilab.common.testlib"""
+
+from __future__ import print_function
+
+import os
+import sys
+from os.path import join, dirname, isdir, isfile, abspath, exists
+import tempfile
+import shutil
+
+try:
+ __file__
+except NameError:
+ __file__ = sys.argv[0]
+
+from six.moves import range
+
+from logilab.common.compat import StringIO
+from logilab.common.testlib import (unittest, TestSuite, unittest_main, Tags,
+ TestCase, mock_object, create_files, InnerTest, with_tempdir, tag,
+ require_version, require_module)
+from logilab.common.pytest import SkipAwareTextTestRunner, NonStrictTestLoader
+
+
+class MockTestCase(TestCase):
+ def __init__(self):
+ # Do not call unittest.TestCase's __init__
+ pass
+
+ def fail(self, msg):
+ raise AssertionError(msg)
+
+class UtilTC(TestCase):
+
+ def test_mockobject(self):
+ obj = mock_object(foo='bar', baz='bam')
+ self.assertEqual(obj.foo, 'bar')
+ self.assertEqual(obj.baz, 'bam')
+
+ def test_create_files(self):
+ chroot = tempfile.mkdtemp()
+ path_to = lambda path: join(chroot, path)
+ dircontent = lambda path: sorted(os.listdir(join(chroot, path)))
+ try:
+ self.assertFalse(isdir(path_to('a/')))
+ create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], chroot)
+ # make sure directories exist
+ self.assertTrue(isdir(path_to('a')))
+ self.assertTrue(isdir(path_to('a/b')))
+ self.assertTrue(isdir(path_to('a/b/c')))
+ self.assertTrue(isdir(path_to('a/b/c/d')))
+ # make sure files exist
+ self.assertTrue(isfile(path_to('a/b/foo.py')))
+ self.assertTrue(isfile(path_to('a/b/c/d/e.py')))
+ # make sure only asked files were created
+ self.assertEqual(dircontent('a'), ['b'])
+ self.assertEqual(dircontent('a/b'), ['c', 'foo.py'])
+ self.assertEqual(dircontent('a/b/c'), ['d'])
+ self.assertEqual(dircontent('a/b/c/d'), ['e.py'])
+ finally:
+ shutil.rmtree(chroot)
+
+
+class TestlibTC(TestCase):
+
+ def mkdir(self, path):
+ if not exists(path):
+ self._dirs.add(path)
+ os.mkdir(path)
+
+ def setUp(self):
+ self.tc = MockTestCase()
+ self._dirs = set()
+
+ def tearDown(self):
+ while(self._dirs):
+ shutil.rmtree(self._dirs.pop(), ignore_errors=True)
+
+ def test_dict_equals(self):
+ """tests TestCase.assertDictEqual"""
+ d1 = {'a' : 1, 'b' : 2}
+ d2 = {'a' : 1, 'b' : 3}
+ d3 = dict(d1)
+ self.assertRaises(AssertionError, self.tc.assertDictEqual, d1, d2)
+ self.tc.assertDictEqual(d1, d3)
+ self.tc.assertDictEqual(d3, d1)
+ self.tc.assertDictEqual(d1, d1)
+
+ def test_list_equals(self):
+ """tests TestCase.assertListEqual"""
+ l1 = list(range(10))
+ l2 = list(range(5))
+ l3 = list(range(10))
+ self.assertRaises(AssertionError, self.tc.assertListEqual, l1, l2)
+ self.tc.assertListEqual(l1, l1)
+ self.tc.assertListEqual(l1, l3)
+ self.tc.assertListEqual(l3, l1)
+
+ def test_equality_for_sets(self):
+ s1 = set('ab')
+ s2 = set('a')
+ self.assertRaises(AssertionError, self.tc.assertSetEqual, s1, s2)
+ self.tc.assertSetEqual(s1, s1)
+ self.tc.assertSetEqual(set(), set())
+
+ def test_text_equality(self):
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", 12)
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", 12)
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", None)
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", None)
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 3.12, u"toto")
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 3.12, u"toto")
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, None, u"toto")
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, None, u"toto")
+ self.tc.assertMultiLineEqual('toto\ntiti', 'toto\ntiti')
+ self.tc.assertMultiLineEqual('toto\ntiti', 'toto\ntiti')
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 'toto\ntiti', 'toto\n titi\n')
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 'toto\ntiti', 'toto\n titi\n')
+ foo = join(dirname(__file__), 'data', 'foo.txt')
+ spam = join(dirname(__file__), 'data', 'spam.txt')
+ with open(foo) as fobj:
+ text1 = fobj.read()
+ self.tc.assertMultiLineEqual(text1, text1)
+ self.tc.assertMultiLineEqual(text1, text1)
+ with open(spam) as fobj:
+ text2 = fobj.read()
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, text1, text2)
+ self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, text1, text2)
+
+ def test_default_datadir(self):
+ expected_datadir = join(dirname(abspath(__file__)), 'data')
+ self.assertEqual(self.datadir, expected_datadir)
+ self.assertEqual(self.datapath('foo'), join(expected_datadir, 'foo'))
+
+ def test_multiple_args_datadir(self):
+ expected_datadir = join(dirname(abspath(__file__)), 'data')
+ self.assertEqual(self.datadir, expected_datadir)
+ self.assertEqual(self.datapath('foo', 'bar'), join(expected_datadir, 'foo', 'bar'))
+
+ def test_custom_datadir(self):
+ class MyTC(TestCase):
+ datadir = 'foo'
+ def test_1(self): pass
+
+ # class' custom datadir
+ tc = MyTC('test_1')
+ self.assertEqual(tc.datapath('bar'), join('foo', 'bar'))
+
+ def test_cached_datadir(self):
+ """test datadir is cached on the class"""
+ class MyTC(TestCase):
+ def test_1(self): pass
+
+ expected_datadir = join(dirname(abspath(__file__)), 'data')
+ tc = MyTC('test_1')
+ self.assertEqual(tc.datadir, expected_datadir)
+ # changing module should not change the datadir
+ MyTC.__module__ = 'os'
+ self.assertEqual(tc.datadir, expected_datadir)
+ # even on new instances
+ tc2 = MyTC('test_1')
+ self.assertEqual(tc2.datadir, expected_datadir)
+
+ def test_is(self):
+ obj_1 = []
+ obj_2 = []
+ self.assertIs(obj_1, obj_1)
+ self.assertRaises(AssertionError, self.assertIs, obj_1, obj_2)
+
+ def test_isnot(self):
+ obj_1 = []
+ obj_2 = []
+ self.assertIsNot(obj_1, obj_2)
+ self.assertRaises(AssertionError, self.assertIsNot, obj_1, obj_1)
+
+ def test_none(self):
+ self.assertIsNone(None)
+ self.assertRaises(AssertionError, self.assertIsNone, object())
+
+ def test_not_none(self):
+ self.assertIsNotNone(object())
+ self.assertRaises(AssertionError, self.assertIsNotNone, None)
+
+ def test_in(self):
+ self.assertIn("a", "dsqgaqg")
+ obj, seq = 'a', ('toto', "azf", "coin")
+ self.assertRaises(AssertionError, self.assertIn, obj, seq)
+
+ def test_not_in(self):
+ self.assertNotIn('a', ('toto', "azf", "coin"))
+ self.assertRaises(AssertionError, self.assertNotIn, 'a', "dsqgaqg")
+
+
+class GenerativeTestsTC(TestCase):
+
+ def setUp(self):
+ output = StringIO()
+ self.runner = SkipAwareTextTestRunner(stream=output)
+
+ def test_generative_ok(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ for i in range(10):
+ yield self.assertEqual, i, i
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 0)
+
+ def test_generative_half_bad(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ for i in range(10):
+ yield self.assertEqual, i%2, 0
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 5)
+ self.assertEqual(len(result.errors), 0)
+
+ def test_generative_error(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ for i in range(10):
+ if i == 5:
+ raise ValueError('STOP !')
+ yield self.assertEqual, i, i
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 5)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 1)
+
+ def test_generative_error2(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ for i in range(10):
+ if i == 5:
+ yield self.ouch
+ yield self.assertEqual, i, i
+ def ouch(self): raise ValueError('stop !')
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 11)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 1)
+
+ def test_generative_setup(self):
+ class FooTC(TestCase):
+ def setUp(self):
+ raise ValueError('STOP !')
+ def test_generative(self):
+ for i in range(10):
+ yield self.assertEqual, i, i
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 1)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 1)
+
+ def test_generative_inner_skip(self):
+ class FooTC(TestCase):
+ def check(self, val):
+ if val == 5:
+ self.innerSkip("no 5")
+ else:
+ self.assertEqual(val, val)
+
+ def test_generative(self):
+ for i in range(10):
+ yield InnerTest("check_%s"%i, self.check, i)
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.skipped), 1)
+
+ def test_generative_skip(self):
+ class FooTC(TestCase):
+ def check(self, val):
+ if val == 5:
+ self.skipTest("no 5")
+ else:
+ self.assertEqual(val, val)
+
+ def test_generative(self):
+ for i in range(10):
+ yield InnerTest("check_%s"%i, self.check, i)
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.skipped), 1)
+
+ def test_generative_inner_error(self):
+ class FooTC(TestCase):
+ def check(self, val):
+ if val == 5:
+ raise ValueError("no 5")
+ else:
+ self.assertEqual(val, val)
+
+ def test_generative(self):
+ for i in range(10):
+ yield InnerTest("check_%s"%i, self.check, i)
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 1)
+ self.assertEqual(len(result.skipped), 0)
+
+ def test_generative_inner_failure(self):
+ class FooTC(TestCase):
+ def check(self, val):
+ if val == 5:
+ self.assertEqual(val, val+1)
+ else:
+ self.assertEqual(val, val)
+
+ def test_generative(self):
+ for i in range(10):
+ yield InnerTest("check_%s"%i, self.check, i)
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 10)
+ self.assertEqual(len(result.failures), 1)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.skipped), 0)
+
+
+ def test_generative_outer_failure(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ self.fail()
+ yield
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 0)
+ self.assertEqual(len(result.failures), 1)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.skipped), 0)
+
+ def test_generative_outer_skip(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ self.skipTest('blah')
+ yield
+
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 0)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.skipped), 1)
+
+
+class ExitFirstTC(TestCase):
+ def setUp(self):
+ output = StringIO()
+ self.runner = SkipAwareTextTestRunner(stream=output, exitfirst=True)
+
+ def test_failure_exit_first(self):
+ class FooTC(TestCase):
+ def test_1(self): pass
+ def test_2(self): assert False
+ def test_3(self): pass
+ tests = [FooTC('test_1'), FooTC('test_2')]
+ result = self.runner.run(TestSuite(tests))
+ self.assertEqual(result.testsRun, 2)
+ self.assertEqual(len(result.failures), 1)
+ self.assertEqual(len(result.errors), 0)
+
+ def test_error_exit_first(self):
+ class FooTC(TestCase):
+ def test_1(self): pass
+ def test_2(self): raise ValueError()
+ def test_3(self): pass
+ tests = [FooTC('test_1'), FooTC('test_2'), FooTC('test_3')]
+ result = self.runner.run(TestSuite(tests))
+ self.assertEqual(result.testsRun, 2)
+ self.assertEqual(len(result.failures), 0)
+ self.assertEqual(len(result.errors), 1)
+
+ def test_generative_exit_first(self):
+ class FooTC(TestCase):
+ def test_generative(self):
+ for i in range(10):
+ yield self.assertTrue, False
+ result = self.runner.run(FooTC('test_generative'))
+ self.assertEqual(result.testsRun, 1)
+ self.assertEqual(len(result.failures), 1)
+ self.assertEqual(len(result.errors), 0)
+
+
+class TestLoaderTC(TestCase):
+ ## internal classes for test purposes ########
+ class FooTC(TestCase):
+ def test_foo1(self): pass
+ def test_foo2(self): pass
+ def test_bar1(self): pass
+
+ class BarTC(TestCase):
+ def test_bar2(self): pass
+ ##############################################
+
+ def setUp(self):
+ self.loader = NonStrictTestLoader()
+ self.module = TestLoaderTC # mock_object(FooTC=TestLoaderTC.FooTC, BarTC=TestLoaderTC.BarTC)
+ self.output = StringIO()
+ self.runner = SkipAwareTextTestRunner(stream=self.output)
+
+ def assertRunCount(self, pattern, module, expected_count, skipped=()):
+ self.loader.test_pattern = pattern
+ self.loader.skipped_patterns = skipped
+ if pattern:
+ suite = self.loader.loadTestsFromNames([pattern], module)
+ else:
+ suite = self.loader.loadTestsFromModule(module)
+ result = self.runner.run(suite)
+ self.loader.test_pattern = None
+ self.loader.skipped_patterns = ()
+ self.assertEqual(result.testsRun, expected_count)
+
+ def test_collect_everything(self):
+ """make sure we don't change the default behaviour
+ for loadTestsFromModule() and loadTestsFromTestCase
+ """
+ testsuite = self.loader.loadTestsFromModule(self.module)
+ self.assertEqual(len(testsuite._tests), 2)
+ suite1, suite2 = testsuite._tests
+ self.assertEqual(len(suite1._tests) + len(suite2._tests), 4)
+
+ def test_collect_with_classname(self):
+ self.assertRunCount('FooTC', self.module, 3)
+ self.assertRunCount('BarTC', self.module, 1)
+
+ def test_collect_with_classname_and_pattern(self):
+ data = [('FooTC.test_foo1', 1), ('FooTC.test_foo', 2), ('FooTC.test_fo', 2),
+ ('FooTC.foo1', 1), ('FooTC.foo', 2), ('FooTC.whatever', 0)
+ ]
+ for pattern, expected_count in data:
+ yield self.assertRunCount, pattern, self.module, expected_count
+
+ def test_collect_with_pattern(self):
+ data = [('test_foo1', 1), ('test_foo', 2), ('test_bar', 2),
+ ('foo1', 1), ('foo', 2), ('bar', 2), ('ba', 2),
+ ('test', 4), ('ab', 0),
+ ]
+ for pattern, expected_count in data:
+ yield self.assertRunCount, pattern, self.module, expected_count
+
+ def test_testcase_with_custom_metaclass(self):
+ class mymetaclass(type): pass
+ class MyMod:
+ class MyTestCase(TestCase):
+ __metaclass__ = mymetaclass
+ def test_foo1(self): pass
+ def test_foo2(self): pass
+ def test_bar(self): pass
+ data = [('test_foo1', 1), ('test_foo', 2), ('test_bar', 1),
+ ('foo1', 1), ('foo', 2), ('bar', 1), ('ba', 1),
+ ('test', 3), ('ab', 0),
+ ('MyTestCase.test_foo1', 1), ('MyTestCase.test_foo', 2),
+ ('MyTestCase.test_fo', 2), ('MyTestCase.foo1', 1),
+ ('MyTestCase.foo', 2), ('MyTestCase.whatever', 0)
+ ]
+ for pattern, expected_count in data:
+ yield self.assertRunCount, pattern, MyMod, expected_count
+
+ def test_collect_everything_and_skipped_patterns(self):
+ testdata = [ (['foo1'], 3), (['foo'], 2),
+ (['foo', 'bar'], 0), ]
+ for skipped, expected_count in testdata:
+ yield self.assertRunCount, None, self.module, expected_count, skipped
+
+ def test_collect_specific_pattern_and_skip_some(self):
+ testdata = [ ('bar', ['foo1'], 2), ('bar', [], 2),
+ ('bar', ['bar'], 0), ]
+ for runpattern, skipped, expected_count in testdata:
+ yield self.assertRunCount, runpattern, self.module, expected_count, skipped
+
+ def test_skip_classname(self):
+ testdata = [ (['BarTC'], 3), (['FooTC'], 1), ]
+ for skipped, expected_count in testdata:
+ yield self.assertRunCount, None, self.module, expected_count, skipped
+
+ def test_skip_classname_and_specific_collect(self):
+ testdata = [ ('bar', ['BarTC'], 1), ('foo', ['FooTC'], 0), ]
+ for runpattern, skipped, expected_count in testdata:
+ yield self.assertRunCount, runpattern, self.module, expected_count, skipped
+
+ def test_nonregr_dotted_path(self):
+ self.assertRunCount('FooTC.test_foo', self.module, 2)
+
+ def test_inner_tests_selection(self):
+ class MyMod:
+ class MyTestCase(TestCase):
+ def test_foo(self): pass
+ def test_foobar(self):
+ for i in range(5):
+ if i%2 == 0:
+ yield InnerTest('even', lambda: None)
+ else:
+ yield InnerTest('odd', lambda: None)
+ yield lambda: None
+
+ # FIXME InnerTest masked by pattern usage
+ # data = [('foo', 7), ('test_foobar', 6), ('even', 3), ('odd', 2), ]
+ data = [('foo', 7), ('test_foobar', 6), ('even', 0), ('odd', 0), ]
+ for pattern, expected_count in data:
+ yield self.assertRunCount, pattern, MyMod, expected_count
+
+ def test_nonregr_class_skipped_option(self):
+ class MyMod:
+ class MyTestCase(TestCase):
+ def test_foo(self): pass
+ def test_bar(self): pass
+ class FooTC(TestCase):
+ def test_foo(self): pass
+ self.assertRunCount('foo', MyMod, 2)
+ self.assertRunCount(None, MyMod, 3)
+ self.assertRunCount('foo', MyMod, 1, ['FooTC'])
+ self.assertRunCount(None, MyMod, 2, ['FooTC'])
+
+ def test__classes_are_ignored(self):
+ class MyMod:
+ class _Base(TestCase):
+ def test_1(self): pass
+ class MyTestCase(_Base):
+ def test_2(self): pass
+ self.assertRunCount(None, MyMod, 2)
+
+
+class DecoratorTC(TestCase):
+
+ @with_tempdir
+ def test_tmp_dir_normal_1(self):
+ tempdir = tempfile.gettempdir()
+ # assert temp directory is empty
+ self.assertListEqual(list(os.walk(tempdir)),
+ [(tempdir, [], [])])
+
+ witness = []
+
+ @with_tempdir
+ def createfile(list):
+ fd1, fn1 = tempfile.mkstemp()
+ fd2, fn2 = tempfile.mkstemp()
+ dir = tempfile.mkdtemp()
+ fd3, fn3 = tempfile.mkstemp(dir=dir)
+ tempfile.mkdtemp()
+ list.append(True)
+ for fd in (fd1, fd2, fd3):
+ os.close(fd)
+
+ self.assertFalse(witness)
+ createfile(witness)
+ self.assertTrue(witness)
+
+ self.assertEqual(tempfile.gettempdir(), tempdir)
+
+ # assert temp directory is empty
+ self.assertListEqual(list(os.walk(tempdir)),
+ [(tempdir, [], [])])
+
+ @with_tempdir
+ def test_tmp_dir_normal_2(self):
+ tempdir = tempfile.gettempdir()
+ # assert temp directory is empty
+ self.assertListEqual(list(os.walk(tempfile.tempdir)),
+ [(tempfile.tempdir, [], [])])
+
+
+ class WitnessException(Exception):
+ pass
+
+ @with_tempdir
+ def createfile():
+ fd1, fn1 = tempfile.mkstemp()
+ fd2, fn2 = tempfile.mkstemp()
+ dir = tempfile.mkdtemp()
+ fd3, fn3 = tempfile.mkstemp(dir=dir)
+ tempfile.mkdtemp()
+ for fd in (fd1, fd2, fd3):
+ os.close(fd)
+ raise WitnessException()
+
+ self.assertRaises(WitnessException, createfile)
+
+ # assert tempdir didn't change
+ self.assertEqual(tempfile.gettempdir(), tempdir)
+
+ # assert temp directory is empty
+ self.assertListEqual(list(os.walk(tempdir)),
+ [(tempdir, [], [])])
+
+ def test_tmpdir_generator(self):
+ orig_tempdir = tempfile.gettempdir()
+
+ @with_tempdir
+ def gen():
+ yield tempfile.gettempdir()
+
+ for tempdir in gen():
+ self.assertNotEqual(orig_tempdir, tempdir)
+ self.assertEqual(orig_tempdir, tempfile.gettempdir())
+
+ def setUp(self):
+ self.pyversion = sys.version_info
+
+ def tearDown(self):
+ sys.version_info = self.pyversion
+
+ def test_require_version_good(self):
+ """ should return the same function
+ """
+ def func() :
+ pass
+ sys.version_info = (2, 5, 5, 'final', 4)
+ current = sys.version_info[:3]
+ compare = ('2.4', '2.5', '2.5.4', '2.5.5')
+ for version in compare:
+ decorator = require_version(version)
+ self.assertEqual(func, decorator(func), '%s =< %s : function \
+ return by the decorator should be the same.' % (version,
+ '.'.join([str(element) for element in current])))
+
+ def test_require_version_bad(self):
+ """ should return a different function : skipping test
+ """
+ def func() :
+ pass
+ sys.version_info = (2, 5, 5, 'final', 4)
+ current = sys.version_info[:3]
+ compare = ('2.5.6', '2.6', '2.6.5')
+ for version in compare:
+ decorator = require_version(version)
+ self.assertNotEqual(func, decorator(func), '%s >= %s : function \
+ return by the decorator should NOT be the same.'
+ % ('.'.join([str(element) for element in current]), version))
+
+ def test_require_version_exception(self):
+ """ should throw a ValueError exception
+ """
+ def func() :
+ pass
+ compare = ('2.5.a', '2.a', 'azerty')
+ for version in compare:
+ decorator = require_version(version)
+ self.assertRaises(ValueError, decorator, func)
+
+ def test_require_module_good(self):
+ """ should return the same function
+ """
+ def func() :
+ pass
+ module = 'sys'
+ decorator = require_module(module)
+ self.assertEqual(func, decorator(func), 'module %s exists : function \
+ return by the decorator should be the same.' % module)
+
+ def test_require_module_bad(self):
+ """ should return a different function : skipping test
+ """
+ def func() :
+ pass
+ modules = ('bla', 'blo', 'bli')
+ for module in modules:
+ try:
+ __import__(module)
+ pass
+ except ImportError:
+ decorator = require_module(module)
+ self.assertNotEqual(func, decorator(func), 'module %s does \
+ not exist : function return by the decorator should \
+ NOT be the same.' % module)
+ return
+ print('all modules in %s exist. Could not test %s' % (', '.join(modules),
+ sys._getframe().f_code.co_name))
+
+class TagTC(TestCase):
+
+ def setUp(self):
+ @tag('testing', 'bob')
+ def bob(a, b, c):
+ return (a + b) * c
+
+ self.func = bob
+
+ class TagTestTC(TestCase):
+ tags = Tags('one', 'two')
+
+ def test_one(self):
+ self.assertTrue(True)
+
+ @tag('two', 'three')
+ def test_two(self):
+ self.assertTrue(True)
+
+ @tag('three', inherit=False)
+ def test_three(self):
+ self.assertTrue(True)
+ self.cls = TagTestTC
+
+ def test_tag_decorator(self):
+ bob = self.func
+
+ self.assertEqual(bob(2, 3, 7), 35)
+ self.assertTrue(hasattr(bob, 'tags'))
+ self.assertSetEqual(bob.tags, set(['testing', 'bob']))
+
+ def test_tags_class(self):
+ tags = self.func.tags
+
+ self.assertTrue(tags['testing'])
+ self.assertFalse(tags['Not inside'])
+
+ def test_tags_match(self):
+ tags = self.func.tags
+
+ self.assertTrue(tags.match('testing'))
+ self.assertFalse(tags.match('other'))
+
+ self.assertFalse(tags.match('testing and coin'))
+ self.assertTrue(tags.match('testing or other'))
+
+ self.assertTrue(tags.match('not other'))
+
+ self.assertTrue(tags.match('not other or (testing and bibi)'))
+ self.assertTrue(tags.match('other or (testing and bob)'))
+
+ def test_tagged_class(self):
+ def options(tags):
+ class Options(object):
+ tags_pattern = tags
+ return Options()
+
+ tc = self.cls('test_one')
+
+ runner = SkipAwareTextTestRunner()
+ self.assertTrue(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertTrue(runner.does_match_tags(tc.test_three))
+
+ runner = SkipAwareTextTestRunner(options=options('one'))
+ self.assertTrue(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertFalse(runner.does_match_tags(tc.test_three))
+
+ runner = SkipAwareTextTestRunner(options=options('two'))
+ self.assertTrue(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertFalse(runner.does_match_tags(tc.test_three))
+
+ runner = SkipAwareTextTestRunner(options=options('three'))
+ self.assertFalse(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertTrue(runner.does_match_tags(tc.test_three))
+
+ runner = SkipAwareTextTestRunner(options=options('two or three'))
+ self.assertTrue(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertTrue(runner.does_match_tags(tc.test_three))
+
+ runner = SkipAwareTextTestRunner(options=options('two and three'))
+ self.assertFalse(runner.does_match_tags(tc.test_one))
+ self.assertTrue(runner.does_match_tags(tc.test_two))
+ self.assertFalse(runner.does_match_tags(tc.test_three))
+
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py
new file mode 100644
index 00000000..330d49c2
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py
@@ -0,0 +1,268 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""
+unit tests for module textutils
+squeleton generated by /home/syt/cvs_work/logilab/pyreverse/py2tests.py on Sep 08 at 09:1:31
+
+"""
+import doctest
+import re
+from os import linesep
+
+from logilab.common import textutils as tu
+from logilab.common.testlib import TestCase, unittest_main
+
+
+if linesep != '\n':
+ import re
+ LINE_RGX = re.compile(linesep)
+ def ulines(string):
+ return LINE_RGX.sub('\n', string)
+else:
+ def ulines(string):
+ return string
+
+class NormalizeTextTC(TestCase):
+
+ def test_known_values(self):
+ self.assertEqual(ulines(tu.normalize_text('''some really malformated
+ text.
+With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes
+
+and empty lines!
+ ''')),
+ '''some really malformated text. With some times some
+veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong
+linnnnnnnnnnnes
+
+and empty lines!''')
+ self.assertMultiLineEqual(ulines(tu.normalize_text('''\
+some ReST formated text
+=======================
+With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes
+and normal lines!
+
+another paragraph
+ ''', rest=True)),
+ '''\
+some ReST formated text
+=======================
+With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy
+loooooooooooooooooooooong linnnnnnnnnnnes
+and normal lines!
+
+another paragraph''')
+
+ def test_nonregr_unsplitable_word(self):
+ self.assertEqual(ulines(tu.normalize_text('''petit complement :
+
+http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n
+''', 80)),
+ '''petit complement :
+
+http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n''')
+
+
+ def test_nonregr_rest_normalize(self):
+ self.assertEqual(ulines(tu.normalize_text("""... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et aller discuter avec les autres si c'est utile ou necessaire.
+ """, rest=True)), """... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et
+aller discuter avec les autres si c'est utile ou necessaire.""")
+
+ def test_normalize_rest_paragraph(self):
+ self.assertEqual(ulines(tu.normalize_rest_paragraph("""**nico**: toto""")),
+ """**nico**: toto""")
+
+ def test_normalize_rest_paragraph2(self):
+ self.assertEqual(ulines(tu.normalize_rest_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
+.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')),
+ """> .. _tdm:
+> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
+> .. _extrait:
+> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""")
+
+ def test_normalize_paragraph2(self):
+ self.assertEqual(ulines(tu.normalize_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
+.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')),
+ """> .. _tdm:
+> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
+> .. _extrait:
+> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""")
+
+class NormalizeParagraphTC(TestCase):
+
+ def test_known_values(self):
+ self.assertEqual(ulines(tu.normalize_text("""This package contains test files shared by the logilab-common package. It isn't
+necessary to install this package unless you want to execute or look at
+the tests.""", indent=' ', line_len=70)),
+ """\
+ This package contains test files shared by the logilab-common
+ package. It isn't necessary to install this package unless you want
+ to execute or look at the tests.""")
+
+
+class GetCsvTC(TestCase):
+
+ def test_known(self):
+ self.assertEqual(tu.splitstrip('a, b,c '), ['a', 'b', 'c'])
+
+class UnitsTC(TestCase):
+
+ def setUp(self):
+ self.units = {
+ 'm': 60,
+ 'kb': 1024,
+ 'mb': 1024*1024,
+ }
+
+ def test_empty_base(self):
+ self.assertEqual(tu.apply_units('17', {}), 17)
+
+ def test_empty_inter(self):
+ def inter(value):
+ return int(float(value)) * 2
+ result = tu.apply_units('12.4', {}, inter=inter)
+ self.assertEqual(result, 12 * 2)
+ self.assertIsInstance(result, float)
+
+ def test_empty_final(self):
+ # int('12.4') raise value error
+ self.assertRaises(ValueError, tu.apply_units, '12.4', {}, final=int)
+
+ def test_empty_inter_final(self):
+ result = tu.apply_units('12.4', {}, inter=float, final=int)
+ self.assertEqual(result, 12)
+ self.assertIsInstance(result, int)
+
+ def test_blank_base(self):
+ result = tu.apply_units(' 42 ', {}, final=int)
+ self.assertEqual(result, 42)
+
+ def test_blank_space(self):
+ result = tu.apply_units(' 1 337 ', {}, final=int)
+ self.assertEqual(result, 1337)
+
+ def test_blank_coma(self):
+ result = tu.apply_units(' 4,298.42 ', {})
+ self.assertEqual(result, 4298.42)
+
+ def test_blank_mixed(self):
+ result = tu.apply_units('45, 317, 337', {}, final=int)
+ self.assertEqual(result, 45317337)
+
+ def test_unit_singleunit_singleletter(self):
+ result = tu.apply_units('15m', self.units)
+ self.assertEqual(result, 15 * self.units['m'] )
+
+ def test_unit_singleunit_multipleletter(self):
+ result = tu.apply_units('47KB', self.units)
+ self.assertEqual(result, 47 * self.units['kb'] )
+
+ def test_unit_singleunit_caseinsensitive(self):
+ result = tu.apply_units('47kb', self.units)
+ self.assertEqual(result, 47 * self.units['kb'] )
+
+ def test_unit_multipleunit(self):
+ result = tu.apply_units('47KB 1.5MB', self.units)
+ self.assertEqual(result, 47 * self.units['kb'] + 1.5 * self.units['mb'])
+
+ def test_unit_with_blank(self):
+ result = tu.apply_units('1 000 KB', self.units)
+ self.assertEqual(result, 1000 * self.units['kb'])
+
+ def test_unit_wrong_input(self):
+ self.assertRaises(ValueError, tu.apply_units, '', self.units)
+ self.assertRaises(ValueError, tu.apply_units, 'wrong input', self.units)
+ self.assertRaises(ValueError, tu.apply_units, 'wrong13 input', self.units)
+ self.assertRaises(ValueError, tu.apply_units, 'wrong input42', self.units)
+
+RGX = re.compile('abcd')
+class PrettyMatchTC(TestCase):
+
+ def test_known(self):
+ string = 'hiuherabcdef'
+ self.assertEqual(ulines(tu.pretty_match(RGX.search(string), string)),
+ 'hiuherabcdef\n ^^^^')
+ def test_known_values_1(self):
+ rgx = re.compile('(to*)')
+ string = 'toto'
+ match = rgx.search(string)
+ self.assertEqual(ulines(tu.pretty_match(match, string)), '''toto
+^^''')
+
+ def test_known_values_2(self):
+ rgx = re.compile('(to*)')
+ string = ''' ... ... to to
+ ... ... '''
+ match = rgx.search(string)
+ self.assertEqual(ulines(tu.pretty_match(match, string)), ''' ... ... to to
+ ^^
+ ... ...''')
+
+
+
+class UnquoteTC(TestCase):
+ def test(self):
+ self.assertEqual(tu.unquote('"toto"'), 'toto')
+ self.assertEqual(tu.unquote("'l'inenarrable toto'"), "l'inenarrable toto")
+ self.assertEqual(tu.unquote("no quote"), "no quote")
+
+
+class ColorizeAnsiTC(TestCase):
+ def test_known(self):
+ self.assertEqual(tu.colorize_ansi('hello', 'blue', 'strike'), '\x1b[9;34mhello\x1b[0m')
+ self.assertEqual(tu.colorize_ansi('hello', style='strike, inverse'), '\x1b[9;7mhello\x1b[0m')
+ self.assertEqual(tu.colorize_ansi('hello', None, None), 'hello')
+ self.assertEqual(tu.colorize_ansi('hello', '', ''), 'hello')
+ def test_raise(self):
+ self.assertRaises(KeyError, tu.colorize_ansi, 'hello', 'bleu', None)
+ self.assertRaises(KeyError, tu.colorize_ansi, 'hello', None, 'italique')
+
+
+class UnormalizeTC(TestCase):
+ def test_unormalize_no_substitute(self):
+ data = [(u'\u0153nologie', u'oenologie'),
+ (u'\u0152nologie', u'OEnologie'),
+ (u'l\xf8to', u'loto'),
+ (u'été', u'ete'),
+ (u'àèùéïîôêç', u'aeueiioec'),
+ (u'ÀÈÙÉÏÎÔÊÇ', u'AEUEIIOEC'),
+ (u'\xa0', u' '), # NO-BREAK SPACE managed by NFKD decomposition
+ (u'\u0154', u'R'),
+ (u'Pointe d\u2019Yves', u"Pointe d'Yves"),
+ (u'Bordeaux\u2013Mérignac', u'Bordeaux-Merignac'),
+ ]
+ for input, output in data:
+ yield self.assertEqual, tu.unormalize(input), output
+
+ def test_unormalize_substitute(self):
+ self.assertEqual(tu.unormalize(u'ab \u8000 cd', substitute='_'),
+ 'ab _ cd')
+
+ def test_unormalize_backward_compat(self):
+ self.assertRaises(ValueError, tu.unormalize, u"\u8000")
+ self.assertEqual(tu.unormalize(u"\u8000", substitute=''), u'')
+
+
+def load_tests(loader, tests, ignore):
+ tests.addTests(doctest.DocTestSuite(tu))
+ return tests
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py b/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py
new file mode 100644
index 00000000..ea5af81a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py
@@ -0,0 +1,247 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""
+unit tests for module logilab.common.tree
+squeleton generated by /home/syt/bin/py2tests on Jan 20 at 10:43:25
+"""
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.tree import *
+
+tree = ('root', (
+ ('child_1_1', (
+ ('child_2_1', ()), ('child_2_2', (
+ ('child_3_1', ()),
+ )))),
+ ('child_1_2', (('child_2_3', ()),))))
+
+def make_tree(tuple):
+ n = Node(tuple[0])
+ for child in tuple[1]:
+ n.append(make_tree(child))
+ return n
+
+class Node_ClassTest(TestCase):
+ """ a basic tree node, caracterised by an id"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+
+ def test_flatten(self):
+ result = [r.id for r in self.o.flatten()]
+ expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']
+ self.assertListEqual(result, expected)
+
+ def test_flatten_with_outlist(self):
+ resultnodes = []
+ self.o.flatten(resultnodes)
+ result = [r.id for r in resultnodes]
+ expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']
+ self.assertListEqual(result, expected)
+
+
+ def test_known_values_remove(self):
+ """
+ remove a child node
+ """
+ self.o.remove(self.o.get_node_by_id('child_1_1'))
+ self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1')
+
+ def test_known_values_replace(self):
+ """
+ replace a child node with another
+ """
+ self.o.replace(self.o.get_node_by_id('child_1_1'), Node('hoho'))
+ self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1')
+ self.assertEqual(self.o.get_node_by_id('hoho'), self.o.children[0])
+
+ def test_known_values_get_sibling(self):
+ """
+ return the sibling node that has given id
+ """
+ self.assertEqual(self.o.children[0].get_sibling('child_1_2'), self.o.children[1], None)
+
+ def test_raise_get_sibling_NodeNotFound(self):
+ self.assertRaises(NodeNotFound, self.o.children[0].get_sibling, 'houhou')
+
+ def test_known_values_get_node_by_id(self):
+ """
+ return node in whole hierarchy that has given id
+ """
+ self.assertEqual(self.o.get_node_by_id('child_1_1'), self.o.children[0])
+
+ def test_raise_get_node_by_id_NodeNotFound(self):
+ self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'houhou')
+
+ def test_known_values_get_child_by_id(self):
+ """
+ return child of given id
+ """
+ self.assertEqual(self.o.get_child_by_id('child_2_1', recurse=1), self.o.children[0].children[0])
+
+ def test_raise_get_child_by_id_NodeNotFound(self):
+ self.assertRaises(NodeNotFound, self.o.get_child_by_id, nid='child_2_1')
+ self.assertRaises(NodeNotFound, self.o.get_child_by_id, 'houhou')
+
+ def test_known_values_get_child_by_path(self):
+ """
+ return child of given path (path is a list of ids)
+ """
+ self.assertEqual(self.o.get_child_by_path(['root', 'child_1_1', 'child_2_1']), self.o.children[0].children[0])
+
+ def test_raise_get_child_by_path_NodeNotFound(self):
+ self.assertRaises(NodeNotFound, self.o.get_child_by_path, ['child_1_1', 'child_2_11'])
+
+ def test_known_values_depth_down(self):
+ """
+ return depth of this node in the tree
+ """
+ self.assertEqual(self.o.depth_down(), 4)
+ self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth_down(), 1)
+
+ def test_known_values_depth(self):
+ """
+ return depth of this node in the tree
+ """
+ self.assertEqual(self.o.depth(), 0)
+ self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth(), 2)
+
+ def test_known_values_width(self):
+ """
+ return depth of this node in the tree
+ """
+ self.assertEqual(self.o.width(), 3)
+ self.assertEqual(self.o.get_child_by_id('child_2_1', True).width(), 1)
+
+ def test_known_values_root(self):
+ """
+ return the root node of the tree
+ """
+ self.assertEqual(self.o.get_child_by_id('child_2_1', True).root(), self.o)
+
+ def test_known_values_leaves(self):
+ """
+ return a list with all the leaf nodes descendant from this task
+ """
+ self.assertEqual(self.o.leaves(), [self.o.get_child_by_id('child_2_1', True),
+ self.o.get_child_by_id('child_3_1', True),
+ self.o.get_child_by_id('child_2_3', True)])
+
+ def test_known_values_lineage(self):
+ c31 = self.o.get_child_by_id('child_3_1', True)
+ self.assertEqual(c31.lineage(), [self.o.get_child_by_id('child_3_1', True),
+ self.o.get_child_by_id('child_2_2', True),
+ self.o.get_child_by_id('child_1_1', True),
+ self.o])
+
+
+class post_order_list_FunctionTest(TestCase):
+ """"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+ def test_known_values_post_order_list(self):
+ """
+ create a list with tree nodes for which the function returned true
+ in a post order foashion
+ """
+ L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root']
+ l = [n.id for n in post_order_list(self.o)]
+ self.assertEqual(l, L, l)
+
+ def test_known_values_post_order_list2(self):
+ """
+ create a list with tree nodes for which the function returned true
+ in a post order foashion
+ """
+ def filter(node):
+ if node.id == 'child_2_2':
+ return 0
+ return 1
+ L = ['child_2_1', 'child_1_1', 'child_2_3', 'child_1_2', 'root']
+ l = [n.id for n in post_order_list(self.o, filter)]
+ self.assertEqual(l, L, l)
+
+
+class PostfixedDepthFirstIterator_ClassTest(TestCase):
+ """"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+ def test_known_values_next(self):
+ L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root']
+ iter = PostfixedDepthFirstIterator(self.o)
+ o = next(iter)
+ i = 0
+ while o:
+ self.assertEqual(o.id, L[i])
+ o = next(iter)
+ i += 1
+
+
+class pre_order_list_FunctionTest(TestCase):
+ """"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+ def test_known_values_pre_order_list(self):
+ """
+ create a list with tree nodes for which the function returned true
+ in a pre order fashion
+ """
+ L = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']
+ l = [n.id for n in pre_order_list(self.o)]
+ self.assertEqual(l, L, l)
+
+ def test_known_values_pre_order_list2(self):
+ """
+ create a list with tree nodes for which the function returned true
+ in a pre order fashion
+ """
+ def filter(node):
+ if node.id == 'child_2_2':
+ return 0
+ return 1
+ L = ['root', 'child_1_1', 'child_2_1', 'child_1_2', 'child_2_3']
+ l = [n.id for n in pre_order_list(self.o, filter)]
+ self.assertEqual(l, L, l)
+
+
+class PrefixedDepthFirstIterator_ClassTest(TestCase):
+ """"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+ def test_known_values_next(self):
+ L = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3']
+ iter = PrefixedDepthFirstIterator(self.o)
+ o = next(iter)
+ i = 0
+ while o:
+ self.assertEqual(o.id, L[i])
+ o = next(iter)
+ i += 1
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py b/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py
new file mode 100644
index 00000000..2841172a
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py
@@ -0,0 +1,94 @@
+# encoding: iso-8859-15
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+import sys
+import email
+from os.path import join, dirname, abspath
+
+from six import text_type
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.umessage import UMessage, decode_QP, message_from_string
+
+DATA = join(dirname(abspath(__file__)), 'data')
+
+class UMessageTC(TestCase):
+
+ def setUp(self):
+ if sys.version_info >= (3, 2):
+ import io
+ msg1 = email.message_from_file(io.open(join(DATA, 'test1.msg'), encoding='utf8'))
+ msg2 = email.message_from_file(io.open(join(DATA, 'test2.msg'), encoding='utf8'))
+ else:
+ msg1 = email.message_from_file(open(join(DATA, 'test1.msg')))
+ msg2 = email.message_from_file(open(join(DATA, 'test2.msg')))
+ self.umessage1 = UMessage(msg1)
+ self.umessage2 = UMessage(msg2)
+
+ def test_get_subject(self):
+ subj = self.umessage2.get('Subject')
+ self.assertEqual(type(subj), text_type)
+ self.assertEqual(subj, u' LA MER')
+
+ def test_get_all(self):
+ to = self.umessage2.get_all('To')
+ self.assertEqual(type(to[0]), text_type)
+ self.assertEqual(to, [u'lment accents '])
+
+ def test_get_payload_no_multi(self):
+ payload = self.umessage1.get_payload()
+ self.assertEqual(type(payload), text_type)
+
+ def test_get_payload_decode(self):
+ msg = """\
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: base64
+Subject: =?utf-8?q?b=C3=AFjour?=
+From: =?utf-8?q?oim?=
+Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?=
+X-CW: data
+To: test@logilab.fr
+Date: now
+
+dW4gcGV0aXQgY8O2dWNvdQ==
+"""
+ msg = message_from_string(msg)
+ self.assertEqual(msg.get_payload(decode=True), u'un petit cucou')
+
+ def test_decode_QP(self):
+ test_line = '=??b?UmFwaGHrbA==?= DUPONT'
+ test = decode_QP(test_line)
+ self.assertEqual(type(test), text_type)
+ self.assertEqual(test, u'Raphal DUPONT')
+
+ def test_decode_QP_utf8(self):
+ test_line = '=?utf-8?q?o=C3=AEm?= '
+ test = decode_QP(test_line)
+ self.assertEqual(type(test), text_type)
+ self.assertEqual(test, u'om ')
+
+ def test_decode_QP_ascii(self):
+ test_line = 'test '
+ test = decode_QP(test_line)
+ self.assertEqual(type(test), text_type)
+ self.assertEqual(test, u'test ')
+
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py
new file mode 100644
index 00000000..2298eec7
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py
@@ -0,0 +1,63 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+'''unit tests for ureports.html_writer
+'''
+
+
+from utils import WriterTC
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.ureports.html_writer import *
+
+class HTMLWriterTC(TestCase, WriterTC):
+
+ def setUp(self):
+ self.writer = HTMLWriter(1)
+
+ # Section tests ###########################################################
+ section_base = '''
+
Section title
+
Section\'s description.
+Blabla bla
+'''
+ section_nested = '''\n
Section title
\n
Section\'s description.\nBlabla bla
\n
Subsection
\n
Sub section description
\n
\n'''
+
+ # List tests ##############################################################
+ list_base = '''\n- item1
\n- item2
\n- item3
\n- item4
\n
\n'''
+
+ nested_list = '''
+an other point
+
+'''
+
+ # Table tests #############################################################
+ table_base = '''\n\n| head1 | \nhead2 | \n
\n\n| cell1 | \ncell2 | \n
\n
\n'''
+ field_table = '''\n\n| f1 | \nv1 | \n
\n\n| f22 | \nv22 | \n
\n\n| f333 | \nv333 | \n
\n
\n'''
+ advanced_table = '''\n\n\n| f1 | \nv1 | \n
\n\n| f22 | \nv22 | \n
\n\n| f333 | \nv333 | \n
\n\n| toi perdu ? | \n | \n
\n
\n'''
+
+
+ # VerbatimText tests ######################################################
+ verbatim_base = '''blablabla
'''
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py
new file mode 100644
index 00000000..dd39dd84
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py
@@ -0,0 +1,104 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+'''unit tests for ureports.text_writer
+'''
+
+
+from utils import WriterTC
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.ureports.text_writer import TextWriter
+
+class TextWriterTC(TestCase, WriterTC):
+ def setUp(self):
+ self.writer = TextWriter()
+
+ # Section tests ###########################################################
+ section_base = '''
+Section title
+=============
+Section\'s description.
+Blabla bla
+
+'''
+ section_nested = '''
+Section title
+=============
+Section\'s description.
+Blabla bla
+
+Subsection
+----------
+Sub section description
+
+
+'''
+
+ # List tests ##############################################################
+ list_base = '''
+* item1
+* item2
+* item3
+* item4'''
+
+ nested_list = '''
+* blabla
+ - 1
+ - 2
+ - 3
+
+* an other point'''
+
+ # Table tests #############################################################
+ table_base = '''
++------+------+
+|head1 |head2 |
++------+------+
+|cell1 |cell2 |
++------+------+
+
+'''
+ field_table = '''
+f1 : v1
+f22 : v22
+f333: v333
+'''
+ advanced_table = '''
++---------------+------+
+|field |value |
++===============+======+
+|f1 |v1 |
++---------------+------+
+|f22 |v22 |
++---------------+------+
+|f333 |v333 |
++---------------+------+
+|`toi perdu ?`_ | |
++---------------+------+
+
+'''
+
+
+ # VerbatimText tests ######################################################
+ verbatim_base = '''::
+
+ blablabla
+
+'''
+
+if __name__ == '__main__':
+ unittest_main()
diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py
new file mode 100644
index 00000000..3d82da93
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+
+from logilab.common.testlib import TestCase, unittest_main
+from logilab.common.xmlutils import parse_pi_data
+
+
+class ProcessingInstructionDataParsingTest(TestCase):
+ def test_empty_pi(self):
+ """
+ Tests the parsing of the data of an empty processing instruction.
+ """
+ pi_data = u" \t \n "
+ data = parse_pi_data(pi_data)
+ self.assertEqual(data, {})
+
+ def test_simple_pi_with_double_quotes(self):
+ """
+ Tests the parsing of the data of a simple processing instruction using
+ double quotes for embedding the value.
+ """
+ pi_data = u""" \t att="value"\n """
+ data = parse_pi_data(pi_data)
+ self.assertEqual(data, {u"att": u"value"})
+
+ def test_simple_pi_with_simple_quotes(self):
+ """
+ Tests the parsing of the data of a simple processing instruction using
+ simple quotes for embedding the value.
+ """
+ pi_data = u""" \t att='value'\n """
+ data = parse_pi_data(pi_data)
+ self.assertEqual(data, {u"att": u"value"})
+
+ def test_complex_pi_with_different_quotes(self):
+ """
+ Tests the parsing of the data of a complex processing instruction using
+ simple quotes or double quotes for embedding the values.
+ """
+ pi_data = u""" \t att='value'\n att2="value2" att3='value3'"""
+ data = parse_pi_data(pi_data)
+ self.assertEqual(data, {u"att": u"value", u"att2": u"value2",
+ u"att3": u"value3"})
+
+ def test_pi_with_non_attribute_data(self):
+ """
+ Tests the parsing of the data of a complex processing instruction
+ containing non-attribute data.
+ """
+ pi_data = u""" \t keyword att1="value1" """
+ data = parse_pi_data(pi_data)
+ self.assertEqual(data, {u"keyword": None, u"att1": u"value1"})
+
+
+# definitions for automatic unit testing
+
+if __name__ == '__main__':
+ unittest_main()
+
diff --git a/pymode/libs/logilab-common-1.4.1/test/utils.py b/pymode/libs/logilab-common-1.4.1/test/utils.py
new file mode 100644
index 00000000..ca1730eb
--- /dev/null
+++ b/pymode/libs/logilab-common-1.4.1/test/utils.py
@@ -0,0 +1,96 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see .
+'''unit tests utilities for ureports
+'''
+
+from __future__ import print_function
+
+import sys
+from io import StringIO
+buffers = [StringIO]
+if sys.version_info < (3, 0):
+ from cStringIO import StringIO as cStringIO
+ from StringIO import StringIO as pStringIO
+ buffers += [cStringIO, pStringIO]
+
+from logilab.common.ureports.nodes import *
+
+class WriterTC:
+ def _test_output(self, test_id, layout, msg=None):
+ for buffercls in buffers:
+ buffer = buffercls()
+ self.writer.format(layout, buffer)
+ got = buffer.getvalue()
+ expected = getattr(self, test_id)
+ try:
+ self.assertMultiLineEqual(got, expected)
+ except:
+ print('**** using a %s' % buffer.__class__)
+ print('**** got for %s' % test_id)
+ print(got)
+ print('**** while expected')
+ print(expected)
+ print('****')
+ raise
+
+ def test_section(self):
+ layout = Section('Section title',
+ 'Section\'s description.\nBlabla bla')
+ self._test_output('section_base', layout)
+ layout.append(Section('Subsection', 'Sub section description'))
+ self._test_output('section_nested', layout)
+
+ def test_verbatim(self):
+ layout = VerbatimText('blablabla')
+ self._test_output('verbatim_base', layout)
+
+
+ def test_list(self):
+ layout = List(children=('item1', 'item2', 'item3', 'item4'))
+ self._test_output('list_base', layout)
+
+ def test_nested_list(self):
+ layout = List(children=(Paragraph(("blabla", List(children=('1', "2", "3")))),
+ "an other point"))
+ self._test_output('nested_list', layout)
+
+
+ def test_table(self):
+ layout = Table(cols=2, children=('head1', 'head2', 'cell1', 'cell2'))
+ self._test_output('table_base', layout)
+
+ def test_field_table(self):
+ table = Table(cols=2, klass='field', id='mytable')
+ for field, value in (('f1', 'v1'), ('f22', 'v22'), ('f333', 'v333')):
+ table.append(Text(field))
+ table.append(Text(value))
+ self._test_output('field_table', table)
+
+ def test_advanced_table(self):
+ table = Table(cols=2, klass='whatever', id='mytable', rheaders=1)
+ for field, value in (('field', 'value'), ('f1', 'v1'), ('f22', 'v22'), ('f333', 'v333')):
+ table.append(Text(field))
+ table.append(Text(value))
+ table.append(Link('http://www.perdu.com', 'toi perdu ?'))
+ table.append(Text(''))
+ self._test_output('advanced_table', table)
+
+
+## def test_image(self):
+## layout = Verbatim('blablabla')
+## self._test_output('verbatim_base', layout)
diff --git a/pymode/libs/logilab/__init__.py b/pymode/libs/logilab/__init__.py
deleted file mode 100644
index 8b137891..00000000
--- a/pymode/libs/logilab/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/pymode/libs/logilab/common/testlib.py b/pymode/libs/logilab/common/testlib.py
deleted file mode 100644
index a6b4b1e1..00000000
--- a/pymode/libs/logilab/common/testlib.py
+++ /dev/null
@@ -1,1338 +0,0 @@
-# -*- coding: utf-8 -*-
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of logilab-common.
-#
-# logilab-common is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option) any
-# later version.
-#
-# logilab-common is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-common. If not, see .
-"""Run tests.
-
-This will find all modules whose name match a given prefix in the test
-directory, and run them. Various command line options provide
-additional facilities.
-
-Command line options:
-
- -v verbose -- run tests in verbose mode with output to stdout
- -q quiet -- don't print anything except if a test fails
- -t testdir -- directory where the tests will be found
- -x exclude -- add a test to exclude
- -p profile -- profiled execution
- -d dbc -- enable design-by-contract
- -m match -- only run test matching the tag pattern which follow
-
-If no non-option arguments are present, prefixes used are 'test',
-'regrtest', 'smoketest' and 'unittest'.
-
-"""
-
-from __future__ import print_function
-
-__docformat__ = "restructuredtext en"
-# modified copy of some functions from test/regrtest.py from PyXml
-# disable camel case warning
-# pylint: disable=C0103
-
-import sys
-import os, os.path as osp
-import re
-import traceback
-import inspect
-import difflib
-import tempfile
-import math
-import warnings
-from shutil import rmtree
-from operator import itemgetter
-from itertools import dropwhile
-from inspect import isgeneratorfunction
-
-from six import string_types
-from six.moves import builtins, range, configparser, input
-
-from logilab.common.deprecation import deprecated
-
-import unittest as unittest_legacy
-if not getattr(unittest_legacy, "__package__", None):
- try:
- import unittest2 as unittest
- from unittest2 import SkipTest
- except ImportError:
- raise ImportError("You have to install python-unittest2 to use %s" % __name__)
-else:
- import unittest
- from unittest import SkipTest
-
-from functools import wraps
-
-from logilab.common.debugger import Debugger, colorize_source
-from logilab.common.decorators import cached, classproperty
-from logilab.common import textutils
-
-
-__all__ = ['main', 'unittest_main', 'find_tests', 'run_test', 'spawn']
-
-DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest',
- 'func', 'validation')
-
-is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction)
-
-# used by unittest to count the number of relevant levels in the traceback
-__unittest = 1
-
-
-def with_tempdir(callable):
- """A decorator ensuring no temporary file left when the function return
- Work only for temporary file created with the tempfile module"""
- if isgeneratorfunction(callable):
- def proxy(*args, **kwargs):
- old_tmpdir = tempfile.gettempdir()
- new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
- tempfile.tempdir = new_tmpdir
- try:
- for x in callable(*args, **kwargs):
- yield x
- finally:
- try:
- rmtree(new_tmpdir, ignore_errors=True)
- finally:
- tempfile.tempdir = old_tmpdir
- return proxy
-
- @wraps(callable)
- def proxy(*args, **kargs):
-
- old_tmpdir = tempfile.gettempdir()
- new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-")
- tempfile.tempdir = new_tmpdir
- try:
- return callable(*args, **kargs)
- finally:
- try:
- rmtree(new_tmpdir, ignore_errors=True)
- finally:
- tempfile.tempdir = old_tmpdir
- return proxy
-
-def in_tempdir(callable):
- """A decorator moving the enclosed function inside the tempfile.tempfdir
- """
- @wraps(callable)
- def proxy(*args, **kargs):
-
- old_cwd = os.getcwd()
- os.chdir(tempfile.tempdir)
- try:
- return callable(*args, **kargs)
- finally:
- os.chdir(old_cwd)
- return proxy
-
-def within_tempdir(callable):
- """A decorator run the enclosed function inside a tmpdir removed after execution
- """
- proxy = with_tempdir(in_tempdir(callable))
- proxy.__name__ = callable.__name__
- return proxy
-
-def find_tests(testdir,
- prefixes=DEFAULT_PREFIXES, suffix=".py",
- excludes=(),
- remove_suffix=True):
- """
- Return a list of all applicable test modules.
- """
- tests = []
- for name in os.listdir(testdir):
- if not suffix or name.endswith(suffix):
- for prefix in prefixes:
- if name.startswith(prefix):
- if remove_suffix and name.endswith(suffix):
- name = name[:-len(suffix)]
- if name not in excludes:
- tests.append(name)
- tests.sort()
- return tests
-
-
-## PostMortem Debug facilities #####
-def start_interactive_mode(result):
- """starts an interactive shell so that the user can inspect errors
- """
- debuggers = result.debuggers
- descrs = result.error_descrs + result.fail_descrs
- if len(debuggers) == 1:
- # don't ask for test name if there's only one failure
- debuggers[0].start()
- else:
- while True:
- testindex = 0
- print("Choose a test to debug:")
- # order debuggers in the same way than errors were printed
- print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
- in enumerate(descrs)]))
- print("Type 'exit' (or ^D) to quit")
- print()
- try:
- todebug = input('Enter a test name: ')
- if todebug.strip().lower() == 'exit':
- print()
- break
- else:
- try:
- testindex = int(todebug)
- debugger = debuggers[descrs[testindex][0]]
- except (ValueError, IndexError):
- print("ERROR: invalid test number %r" % (todebug, ))
- else:
- debugger.start()
- except (EOFError, KeyboardInterrupt):
- print()
- break
-
-
-# test utils ##################################################################
-
-class SkipAwareTestResult(unittest._TextTestResult):
-
- def __init__(self, stream, descriptions, verbosity,
- exitfirst=False, pdbmode=False, cvg=None, colorize=False):
- super(SkipAwareTestResult, self).__init__(stream,
- descriptions, verbosity)
- self.skipped = []
- self.debuggers = []
- self.fail_descrs = []
- self.error_descrs = []
- self.exitfirst = exitfirst
- self.pdbmode = pdbmode
- self.cvg = cvg
- self.colorize = colorize
- self.pdbclass = Debugger
- self.verbose = verbosity > 1
-
- def descrs_for(self, flavour):
- return getattr(self, '%s_descrs' % flavour.lower())
-
- def _create_pdb(self, test_descr, flavour):
- self.descrs_for(flavour).append( (len(self.debuggers), test_descr) )
- if self.pdbmode:
- self.debuggers.append(self.pdbclass(sys.exc_info()[2]))
-
- def _iter_valid_frames(self, frames):
- """only consider non-testlib frames when formatting traceback"""
- lgc_testlib = osp.abspath(__file__)
- std_testlib = osp.abspath(unittest.__file__)
- invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib)
- for frameinfo in dropwhile(invalid, frames):
- yield frameinfo
-
- def _exc_info_to_string(self, err, test):
- """Converts a sys.exc_info()-style tuple of values into a string.
-
- This method is overridden here because we want to colorize
- lines if --color is passed, and display local variables if
- --verbose is passed
- """
- exctype, exc, tb = err
- output = ['Traceback (most recent call last)']
- frames = inspect.getinnerframes(tb)
- colorize = self.colorize
- frames = enumerate(self._iter_valid_frames(frames))
- for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames:
- filename = osp.abspath(filename)
- if ctx is None: # pyc files or C extensions for instance
- source = ''
- else:
- source = ''.join(ctx)
- if colorize:
- filename = textutils.colorize_ansi(filename, 'magenta')
- source = colorize_source(source)
- output.append(' File "%s", line %s, in %s' % (filename, lineno, funcname))
- output.append(' %s' % source.strip())
- if self.verbose:
- output.append('%r == %r' % (dir(frame), test.__module__))
- output.append('')
- output.append(' ' + ' local variables '.center(66, '-'))
- for varname, value in sorted(frame.f_locals.items()):
- output.append(' %s: %r' % (varname, value))
- if varname == 'self': # special handy processing for self
- for varname, value in sorted(vars(value).items()):
- output.append(' self.%s: %r' % (varname, value))
- output.append(' ' + '-' * 66)
- output.append('')
- output.append(''.join(traceback.format_exception_only(exctype, exc)))
- return '\n'.join(output)
-
- def addError(self, test, err):
- """err -> (exc_type, exc, tcbk)"""
- exc_type, exc, _ = err
- if isinstance(exc, SkipTest):
- assert exc_type == SkipTest
- self.addSkip(test, exc)
- else:
- if self.exitfirst:
- self.shouldStop = True
- descr = self.getDescription(test)
- super(SkipAwareTestResult, self).addError(test, err)
- self._create_pdb(descr, 'error')
-
- def addFailure(self, test, err):
- if self.exitfirst:
- self.shouldStop = True
- descr = self.getDescription(test)
- super(SkipAwareTestResult, self).addFailure(test, err)
- self._create_pdb(descr, 'fail')
-
- def addSkip(self, test, reason):
- self.skipped.append((test, reason))
- if self.showAll:
- self.stream.writeln("SKIPPED")
- elif self.dots:
- self.stream.write('S')
-
- def printErrors(self):
- super(SkipAwareTestResult, self).printErrors()
- self.printSkippedList()
-
- def printSkippedList(self):
- # format (test, err) compatible with unittest2
- for test, err in self.skipped:
- descr = self.getDescription(test)
- self.stream.writeln(self.separator1)
- self.stream.writeln("%s: %s" % ('SKIPPED', descr))
- self.stream.writeln("\t%s" % err)
-
- def printErrorList(self, flavour, errors):
- for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors):
- self.stream.writeln(self.separator1)
- self.stream.writeln("%s: %s" % (flavour, descr))
- self.stream.writeln(self.separator2)
- self.stream.writeln(err)
- self.stream.writeln('no stdout'.center(len(self.separator2)))
- self.stream.writeln('no stderr'.center(len(self.separator2)))
-
-# Add deprecation warnings about new api used by module level fixtures in unittest2
-# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule
-class _DebugResult(object): # simplify import statement among unittest flavors..
- "Used by the TestSuite to hold previous class when running in debug."
- _previousTestClass = None
- _moduleSetUpFailed = False
- shouldStop = False
-
-# backward compatibility: TestSuite might be imported from lgc.testlib
-TestSuite = unittest.TestSuite
-
-class keywords(dict):
- """Keyword args (**kwargs) support for generative tests."""
-
-class starargs(tuple):
- """Variable arguments (*args) for generative tests."""
- def __new__(cls, *args):
- return tuple.__new__(cls, args)
-
-unittest_main = unittest.main
-
-
-class InnerTestSkipped(SkipTest):
- """raised when a test is skipped"""
- pass
-
-def parse_generative_args(params):
- args = []
- varargs = ()
- kwargs = {}
- flags = 0 # 2 <=> starargs, 4 <=> kwargs
- for param in params:
- if isinstance(param, starargs):
- varargs = param
- if flags:
- raise TypeError('found starargs after keywords !')
- flags |= 2
- args += list(varargs)
- elif isinstance(param, keywords):
- kwargs = param
- if flags & 4:
- raise TypeError('got multiple keywords parameters')
- flags |= 4
- elif flags & 2 or flags & 4:
- raise TypeError('found parameters after kwargs or args')
- else:
- args.append(param)
-
- return args, kwargs
-
-
-class InnerTest(tuple):
- def __new__(cls, name, *data):
- instance = tuple.__new__(cls, data)
- instance.name = name
- return instance
-
-class Tags(set):
- """A set of tag able validate an expression"""
-
- def __init__(self, *tags, **kwargs):
- self.inherit = kwargs.pop('inherit', True)
- if kwargs:
- raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys())
-
- if len(tags) == 1 and not isinstance(tags[0], string_types):
- tags = tags[0]
- super(Tags, self).__init__(tags, **kwargs)
-
- def __getitem__(self, key):
- return key in self
-
- def match(self, exp):
- return eval(exp, {}, self)
-
- def __or__(self, other):
- return Tags(*super(Tags, self).__or__(other))
-
-
-# duplicate definition from unittest2 of the _deprecate decorator
-def _deprecate(original_func):
- def deprecated_func(*args, **kwargs):
- warnings.warn(
- ('Please use %s instead.' % original_func.__name__),
- DeprecationWarning, 2)
- return original_func(*args, **kwargs)
- return deprecated_func
-
-class TestCase(unittest.TestCase):
- """A unittest.TestCase extension with some additional methods."""
- maxDiff = None
- pdbclass = Debugger
- tags = Tags()
-
- def __init__(self, methodName='runTest'):
- super(TestCase, self).__init__(methodName)
- self.__exc_info = sys.exc_info
- self.__testMethodName = self._testMethodName
- self._current_test_descr = None
- self._options_ = None
-
- @classproperty
- @cached
- def datadir(cls): # pylint: disable=E0213
- """helper attribute holding the standard test's data directory
-
- NOTE: this is a logilab's standard
- """
- mod = sys.modules[cls.__module__]
- return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data')
- # cache it (use a class method to cache on class since TestCase is
- # instantiated for each test run)
-
- @classmethod
- def datapath(cls, *fname):
- """joins the object's datadir and `fname`"""
- return osp.join(cls.datadir, *fname)
-
- def set_description(self, descr):
- """sets the current test's description.
- This can be useful for generative tests because it allows to specify
- a description per yield
- """
- self._current_test_descr = descr
-
- # override default's unittest.py feature
- def shortDescription(self):
- """override default unittest shortDescription to handle correctly
- generative tests
- """
- if self._current_test_descr is not None:
- return self._current_test_descr
- return super(TestCase, self).shortDescription()
-
- def quiet_run(self, result, func, *args, **kwargs):
- try:
- func(*args, **kwargs)
- except (KeyboardInterrupt, SystemExit):
- raise
- except unittest.SkipTest as e:
- if hasattr(result, 'addSkip'):
- result.addSkip(self, str(e))
- else:
- warnings.warn("TestResult has no addSkip method, skips not reported",
- RuntimeWarning, 2)
- result.addSuccess(self)
- return False
- except:
- result.addError(self, self.__exc_info())
- return False
- return True
-
- def _get_test_method(self):
- """return the test method"""
- return getattr(self, self._testMethodName)
-
- def optval(self, option, default=None):
- """return the option value or default if the option is not define"""
- return getattr(self._options_, option, default)
-
- def __call__(self, result=None, runcondition=None, options=None):
- """rewrite TestCase.__call__ to support generative tests
- This is mostly a copy/paste from unittest.py (i.e same
- variable names, same logic, except for the generative tests part)
- """
- from logilab.common.pytest import FILE_RESTART
- if result is None:
- result = self.defaultTestResult()
- result.pdbclass = self.pdbclass
- self._options_ = options
- # if result.cvg:
- # result.cvg.start()
- testMethod = self._get_test_method()
- if (getattr(self.__class__, "__unittest_skip__", False) or
- getattr(testMethod, "__unittest_skip__", False)):
- # If the class or method was skipped.
- try:
- skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
- or getattr(testMethod, '__unittest_skip_why__', ''))
- self._addSkip(result, skip_why)
- finally:
- result.stopTest(self)
- return
- if runcondition and not runcondition(testMethod):
- return # test is skipped
- result.startTest(self)
- try:
- if not self.quiet_run(result, self.setUp):
- return
- generative = isgeneratorfunction(testMethod)
- # generative tests
- if generative:
- self._proceed_generative(result, testMethod,
- runcondition)
- else:
- status = self._proceed(result, testMethod)
- success = (status == 0)
- if not self.quiet_run(result, self.tearDown):
- return
- if not generative and success:
- if hasattr(options, "exitfirst") and options.exitfirst:
- # add this test to restart file
- try:
- restartfile = open(FILE_RESTART, 'a')
- try:
- descr = '.'.join((self.__class__.__module__,
- self.__class__.__name__,
- self._testMethodName))
- restartfile.write(descr+os.linesep)
- finally:
- restartfile.close()
- except Exception:
- print("Error while saving succeeded test into",
- osp.join(os.getcwd(), FILE_RESTART),
- file=sys.__stderr__)
- raise
- result.addSuccess(self)
- finally:
- # if result.cvg:
- # result.cvg.stop()
- result.stopTest(self)
-
- def _proceed_generative(self, result, testfunc, runcondition=None):
- # cancel startTest()'s increment
- result.testsRun -= 1
- success = True
- try:
- for params in testfunc():
- if runcondition and not runcondition(testfunc,
- skipgenerator=False):
- if not (isinstance(params, InnerTest)
- and runcondition(params)):
- continue
- if not isinstance(params, (tuple, list)):
- params = (params, )
- func = params[0]
- args, kwargs = parse_generative_args(params[1:])
- # increment test counter manually
- result.testsRun += 1
- status = self._proceed(result, func, args, kwargs)
- if status == 0:
- result.addSuccess(self)
- success = True
- else:
- success = False
- # XXX Don't stop anymore if an error occured
- #if status == 2:
- # result.shouldStop = True
- if result.shouldStop: # either on error or on exitfirst + error
- break
- except:
- # if an error occurs between two yield
- result.addError(self, self.__exc_info())
- success = False
- return success
-
- def _proceed(self, result, testfunc, args=(), kwargs=None):
- """proceed the actual test
- returns 0 on success, 1 on failure, 2 on error
-
- Note: addSuccess can't be called here because we have to wait
- for tearDown to be successfully executed to declare the test as
- successful
- """
- kwargs = kwargs or {}
- try:
- testfunc(*args, **kwargs)
- except self.failureException:
- result.addFailure(self, self.__exc_info())
- return 1
- except KeyboardInterrupt:
- raise
- except InnerTestSkipped as e:
- result.addSkip(self, e)
- return 1
- except SkipTest as e:
- result.addSkip(self, e)
- return 0
- except:
- result.addError(self, self.__exc_info())
- return 2
- return 0
-
- def defaultTestResult(self):
- """return a new instance of the defaultTestResult"""
- return SkipAwareTestResult()
-
- skip = _deprecate(unittest.TestCase.skipTest)
- assertEquals = _deprecate(unittest.TestCase.assertEqual)
- assertNotEquals = _deprecate(unittest.TestCase.assertNotEqual)
- assertAlmostEquals = _deprecate(unittest.TestCase.assertAlmostEqual)
- assertNotAlmostEquals = _deprecate(unittest.TestCase.assertNotAlmostEqual)
-
- def innerSkip(self, msg=None):
- """mark a generative test as skipped for the reason"""
- msg = msg or 'test was skipped'
- raise InnerTestSkipped(msg)
-
- @deprecated('Please use assertDictEqual instead.')
- def assertDictEquals(self, dict1, dict2, msg=None, context=None):
- """compares two dicts
-
- If the two dict differ, the first difference is shown in the error
- message
- :param dict1: a Python Dictionary
- :param dict2: a Python Dictionary
- :param msg: custom message (String) in case of failure
- """
- dict1 = dict(dict1)
- msgs = []
- for key, value in dict2.items():
- try:
- if dict1[key] != value:
- msgs.append('%r != %r for key %r' % (dict1[key], value,
- key))
- del dict1[key]
- except KeyError:
- msgs.append('missing %r key' % key)
- if dict1:
- msgs.append('dict2 is lacking %r' % dict1)
- if msg:
- self.failureException(msg)
- elif msgs:
- if context is not None:
- base = '%s\n' % context
- else:
- base = ''
- self.fail(base + '\n'.join(msgs))
-
- @deprecated('Please use assertCountEqual instead.')
- def assertUnorderedIterableEquals(self, got, expected, msg=None):
- """compares two iterable and shows difference between both
-
- :param got: the unordered Iterable that we found
- :param expected: the expected unordered Iterable
- :param msg: custom message (String) in case of failure
- """
- got, expected = list(got), list(expected)
- self.assertSetEqual(set(got), set(expected), msg)
- if len(got) != len(expected):
- if msg is None:
- msg = ['Iterable have the same elements but not the same number',
- '\t\ti\t']
- got_count = {}
- expected_count = {}
- for element in got:
- got_count[element] = got_count.get(element, 0) + 1
- for element in expected:
- expected_count[element] = expected_count.get(element, 0) + 1
- # we know that got_count.key() == expected_count.key()
- # because of assertSetEqual
- for element, count in got_count.iteritems():
- other_count = expected_count[element]
- if other_count != count:
- msg.append('\t%s\t%s\t%s' % (element, other_count, count))
-
- self.fail(msg)
-
- assertUnorderedIterableEqual = assertUnorderedIterableEquals
- assertUnordIterEquals = assertUnordIterEqual = assertUnorderedIterableEqual
-
- @deprecated('Please use assertSetEqual instead.')
- def assertSetEquals(self,got,expected, msg=None):
- """compares two sets and shows difference between both
-
- Don't use it for iterables other than sets.
-
- :param got: the Set that we found
- :param expected: the second Set to be compared to the first one
- :param msg: custom message (String) in case of failure
- """
-
- if not(isinstance(got, set) and isinstance(expected, set)):
- warnings.warn("the assertSetEquals function if now intended for set only."\
- "use assertUnorderedIterableEquals instead.",
- DeprecationWarning, 2)
- return self.assertUnorderedIterableEquals(got, expected, msg)
-
- items={}
- items['missing'] = expected - got
- items['unexpected'] = got - expected
- if any(items.itervalues()):
- if msg is None:
- msg = '\n'.join('%s:\n\t%s' % (key, "\n\t".join(str(value) for value in values))
- for key, values in items.iteritems() if values)
- self.fail(msg)
-
- @deprecated('Please use assertListEqual instead.')
- def assertListEquals(self, list_1, list_2, msg=None):
- """compares two lists
-
- If the two list differ, the first difference is shown in the error
- message
-
- :param list_1: a Python List
- :param list_2: a second Python List
- :param msg: custom message (String) in case of failure
- """
- _l1 = list_1[:]
- for i, value in enumerate(list_2):
- try:
- if _l1[0] != value:
- from pprint import pprint
- pprint(list_1)
- pprint(list_2)
- self.fail('%r != %r for index %d' % (_l1[0], value, i))
- del _l1[0]
- except IndexError:
- if msg is None:
- msg = 'list_1 has only %d elements, not %s '\
- '(at least %r missing)'% (i, len(list_2), value)
- self.fail(msg)
- if _l1:
- if msg is None:
- msg = 'list_2 is lacking %r' % _l1
- self.fail(msg)
-
- @deprecated('Non-standard. Please use assertMultiLineEqual instead.')
- def assertLinesEquals(self, string1, string2, msg=None, striplines=False):
- """compare two strings and assert that the text lines of the strings
- are equal.
-
- :param string1: a String
- :param string2: a String
- :param msg: custom message (String) in case of failure
- :param striplines: Boolean to trigger line stripping before comparing
- """
- lines1 = string1.splitlines()
- lines2 = string2.splitlines()
- if striplines:
- lines1 = [l.strip() for l in lines1]
- lines2 = [l.strip() for l in lines2]
- self.assertListEqual(lines1, lines2, msg)
- assertLineEqual = assertLinesEquals
-
- @deprecated('Non-standard: please copy test method to your TestCase class')
- def assertXMLWellFormed(self, stream, msg=None, context=2):
- """asserts the XML stream is well-formed (no DTD conformance check)
-
- :param context: number of context lines in standard message
- (show all data if negative).
- Only available with element tree
- """
- try:
- from xml.etree.ElementTree import parse
- self._assertETXMLWellFormed(stream, parse, msg)
- except ImportError:
- from xml.sax import make_parser, SAXParseException
- parser = make_parser()
- try:
- parser.parse(stream)
- except SAXParseException as ex:
- if msg is None:
- stream.seek(0)
- for _ in range(ex.getLineNumber()):
- line = stream.readline()
- pointer = ('' * (ex.getLineNumber() - 1)) + '^'
- msg = 'XML stream not well formed: %s\n%s%s' % (ex, line, pointer)
- self.fail(msg)
-
- @deprecated('Non-standard: please copy test method to your TestCase class')
- def assertXMLStringWellFormed(self, xml_string, msg=None, context=2):
- """asserts the XML string is well-formed (no DTD conformance check)
-
- :param context: number of context lines in standard message
- (show all data if negative).
- Only available with element tree
- """
- try:
- from xml.etree.ElementTree import fromstring
- except ImportError:
- from elementtree.ElementTree import fromstring
- self._assertETXMLWellFormed(xml_string, fromstring, msg)
-
- def _assertETXMLWellFormed(self, data, parse, msg=None, context=2):
- """internal function used by /assertXML(String)?WellFormed/ functions
-
- :param data: xml_data
- :param parse: appropriate parser function for this data
- :param msg: error message
- :param context: number of context lines in standard message
- (show all data if negative).
- Only available with element tree
- """
- from xml.parsers.expat import ExpatError
- try:
- from xml.etree.ElementTree import ParseError
- except ImportError:
- # compatibility for 1:
- if len(tup)<=1:
- self.fail( "tuple %s has no attributes (%s expected)"%(tup,
- dict(element.attrib)))
- self.assertDictEqual(element.attrib, tup[1])
- # check children
- if len(element) or len(tup)>2:
- if len(tup)<=2:
- self.fail( "tuple %s has no children (%i expected)"%(tup,
- len(element)))
- if len(element) != len(tup[2]):
- self.fail( "tuple %s has %i children%s (%i expected)"%(tup,
- len(tup[2]),
- ('', 's')[len(tup[2])>1], len(element)))
- for index in range(len(tup[2])):
- self.assertXMLEqualsTuple(element[index], tup[2][index])
- #check text
- if element.text or len(tup)>3:
- if len(tup)<=3:
- self.fail( "tuple %s has no text value (%r expected)"%(tup,
- element.text))
- self.assertTextEquals(element.text, tup[3])
- #check tail
- if element.tail or len(tup)>4:
- if len(tup)<=4:
- self.fail( "tuple %s has no tail value (%r expected)"%(tup,
- element.tail))
- self.assertTextEquals(element.tail, tup[4])
-
- def _difftext(self, lines1, lines2, junk=None, msg_prefix='Texts differ'):
- junk = junk or (' ', '\t')
- # result is a generator
- result = difflib.ndiff(lines1, lines2, charjunk=lambda x: x in junk)
- read = []
- for line in result:
- read.append(line)
- # lines that don't start with a ' ' are diff ones
- if not line.startswith(' '):
- self.fail('\n'.join(['%s\n'%msg_prefix]+read + list(result)))
-
- @deprecated('Non-standard. Please use assertMultiLineEqual instead.')
- def assertTextEquals(self, text1, text2, junk=None,
- msg_prefix='Text differ', striplines=False):
- """compare two multiline strings (using difflib and splitlines())
-
- :param text1: a Python BaseString
- :param text2: a second Python Basestring
- :param junk: List of Caracters
- :param msg_prefix: String (message prefix)
- :param striplines: Boolean to trigger line stripping before comparing
- """
- msg = []
- if not isinstance(text1, string_types):
- msg.append('text1 is not a string (%s)'%(type(text1)))
- if not isinstance(text2, string_types):
- msg.append('text2 is not a string (%s)'%(type(text2)))
- if msg:
- self.fail('\n'.join(msg))
- lines1 = text1.strip().splitlines(True)
- lines2 = text2.strip().splitlines(True)
- if striplines:
- lines1 = [line.strip() for line in lines1]
- lines2 = [line.strip() for line in lines2]
- self._difftext(lines1, lines2, junk, msg_prefix)
- assertTextEqual = assertTextEquals
-
- @deprecated('Non-standard: please copy test method to your TestCase class')
- def assertStreamEquals(self, stream1, stream2, junk=None,
- msg_prefix='Stream differ'):
- """compare two streams (using difflib and readlines())"""
- # if stream2 is stream2, readlines() on stream1 will also read lines
- # in stream2, so they'll appear different, although they're not
- if stream1 is stream2:
- return
- # make sure we compare from the beginning of the stream
- stream1.seek(0)
- stream2.seek(0)
- # compare
- self._difftext(stream1.readlines(), stream2.readlines(), junk,
- msg_prefix)
-
- assertStreamEqual = assertStreamEquals
-
- @deprecated('Non-standard: please copy test method to your TestCase class')
- def assertFileEquals(self, fname1, fname2, junk=(' ', '\t')):
- """compares two files using difflib"""
- self.assertStreamEqual(open(fname1), open(fname2), junk,
- msg_prefix='Files differs\n-:%s\n+:%s\n'%(fname1, fname2))
-
- assertFileEqual = assertFileEquals
-
- @deprecated('Non-standard: please copy test method to your TestCase class')
- def assertDirEquals(self, path_a, path_b):
- """compares two files using difflib"""
- assert osp.exists(path_a), "%s doesn't exists" % path_a
- assert osp.exists(path_b), "%s doesn't exists" % path_b
-
- all_a = [ (ipath[len(path_a):].lstrip('/'), idirs, ifiles)
- for ipath, idirs, ifiles in os.walk(path_a)]
- all_a.sort(key=itemgetter(0))
-
- all_b = [ (ipath[len(path_b):].lstrip('/'), idirs, ifiles)
- for ipath, idirs, ifiles in os.walk(path_b)]
- all_b.sort(key=itemgetter(0))
-
- iter_a, iter_b = iter(all_a), iter(all_b)
- partial_iter = True
- ipath_a, idirs_a, ifiles_a = data_a = None, None, None
- while True:
- try:
- ipath_a, idirs_a, ifiles_a = datas_a = next(iter_a)
- partial_iter = False
- ipath_b, idirs_b, ifiles_b = datas_b = next(iter_b)
- partial_iter = True
-
-
- self.assertTrue(ipath_a == ipath_b,
- "unexpected %s in %s while looking %s from %s" %
- (ipath_a, path_a, ipath_b, path_b))
-
-
- errors = {}
- sdirs_a = set(idirs_a)
- sdirs_b = set(idirs_b)
- errors["unexpected directories"] = sdirs_a - sdirs_b
- errors["missing directories"] = sdirs_b - sdirs_a
-
- sfiles_a = set(ifiles_a)
- sfiles_b = set(ifiles_b)
- errors["unexpected files"] = sfiles_a - sfiles_b
- errors["missing files"] = sfiles_b - sfiles_a
-
-
- msgs = [ "%s: %s"% (name, items)
- for name, items in errors.items() if items]
-
- if msgs:
- msgs.insert(0, "%s and %s differ :" % (
- osp.join(path_a, ipath_a),
- osp.join(path_b, ipath_b),
- ))
- self.fail("\n".join(msgs))
-
- for files in (ifiles_a, ifiles_b):
- files.sort()
-
- for index, path in enumerate(ifiles_a):
- self.assertFileEquals(osp.join(path_a, ipath_a, path),
- osp.join(path_b, ipath_b, ifiles_b[index]))
-
- except StopIteration:
- break
-
- assertDirEqual = assertDirEquals
-
- def assertIsInstance(self, obj, klass, msg=None, strict=False):
- """check if an object is an instance of a class
-
- :param obj: the Python Object to be checked
- :param klass: the target class
- :param msg: a String for a custom message
- :param strict: if True, check that the class of is ;
- else check with 'isinstance'
- """
- if strict:
- warnings.warn('[API] Non-standard. Strict parameter has vanished',
- DeprecationWarning, stacklevel=2)
- if msg is None:
- if strict:
- msg = '%r is not of class %s but of %s'
- else:
- msg = '%r is not an instance of %s but of %s'
- msg = msg % (obj, klass, type(obj))
- if strict:
- self.assertTrue(obj.__class__ is klass, msg)
- else:
- self.assertTrue(isinstance(obj, klass), msg)
-
- @deprecated('Please use assertIsNone instead.')
- def assertNone(self, obj, msg=None):
- """assert obj is None
-
- :param obj: Python Object to be tested
- """
- if msg is None:
- msg = "reference to %r when None expected"%(obj,)
- self.assertTrue( obj is None, msg )
-
- @deprecated('Please use assertIsNotNone instead.')
- def assertNotNone(self, obj, msg=None):
- """assert obj is not None"""
- if msg is None:
- msg = "unexpected reference to None"
- self.assertTrue( obj is not None, msg )
-
- @deprecated('Non-standard. Please use assertAlmostEqual instead.')
- def assertFloatAlmostEquals(self, obj, other, prec=1e-5,
- relative=False, msg=None):
- """compares if two floats have a distance smaller than expected
- precision.
-
- :param obj: a Float
- :param other: another Float to be comparted to
- :param prec: a Float describing the precision
- :param relative: boolean switching to relative/absolute precision
- :param msg: a String for a custom message
- """
- if msg is None:
- msg = "%r != %r" % (obj, other)
- if relative:
- prec = prec*math.fabs(obj)
- self.assertTrue(math.fabs(obj - other) < prec, msg)
-
- def failUnlessRaises(self, excClass, callableObj=None, *args, **kwargs):
- """override default failUnlessRaises method to return the raised
- exception instance.
-
- Fail unless an exception of class excClass is thrown
- by callableObj when invoked with arguments args and keyword
- arguments kwargs. If a different type of exception is
- thrown, it will not be caught, and the test case will be
- deemed to have suffered an error, exactly as for an
- unexpected exception.
-
- CAUTION! There are subtle differences between Logilab and unittest2
- - exc is not returned in standard version
- - context capabilities in standard version
- - try/except/else construction (minor)
-
- :param excClass: the Exception to be raised
- :param callableObj: a callable Object which should raise
- :param args: a List of arguments for
- :param kwargs: a List of keyword arguments for
- """
- # XXX cube vcslib : test_branches_from_app
- if callableObj is None:
- _assert = super(TestCase, self).assertRaises
- return _assert(excClass, callableObj, *args, **kwargs)
- try:
- callableObj(*args, **kwargs)
- except excClass as exc:
- class ProxyException:
- def __init__(self, obj):
- self._obj = obj
- def __getattr__(self, attr):
- warn_msg = ("This exception was retrieved with the old testlib way "
- "`exc = self.assertRaises(Exc, callable)`, please use "
- "the context manager instead'")
- warnings.warn(warn_msg, DeprecationWarning, 2)
- return self._obj.__getattribute__(attr)
- return ProxyException(exc)
- else:
- if hasattr(excClass, '__name__'):
- excName = excClass.__name__
- else:
- excName = str(excClass)
- raise self.failureException("%s not raised" % excName)
-
- assertRaises = failUnlessRaises
-
- if sys.version_info >= (3,2):
- assertItemsEqual = unittest.TestCase.assertCountEqual
- else:
- assertCountEqual = unittest.TestCase.assertItemsEqual
- if sys.version_info < (2,7):
- def assertIsNotNone(self, value, *args, **kwargs):
- self.assertNotEqual(None, value, *args, **kwargs)
-
-TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')(
- TestCase.assertItemsEqual)
-
-import doctest
-
-class SkippedSuite(unittest.TestSuite):
- def test(self):
- """just there to trigger test execution"""
- self.skipped_test('doctest module has no DocTestSuite class')
-
-
-class DocTestFinder(doctest.DocTestFinder):
-
- def __init__(self, *args, **kwargs):
- self.skipped = kwargs.pop('skipped', ())
- doctest.DocTestFinder.__init__(self, *args, **kwargs)
-
- def _get_test(self, obj, name, module, globs, source_lines):
- """override default _get_test method to be able to skip tests
- according to skipped attribute's value
- """
- if getattr(obj, '__name__', '') in self.skipped:
- return None
- return doctest.DocTestFinder._get_test(self, obj, name, module,
- globs, source_lines)
-
-
-class DocTest(TestCase):
- """trigger module doctest
- I don't know how to make unittest.main consider the DocTestSuite instance
- without this hack
- """
- skipped = ()
- def __call__(self, result=None, runcondition=None, options=None):\
- # pylint: disable=W0613
- try:
- finder = DocTestFinder(skipped=self.skipped)
- suite = doctest.DocTestSuite(self.module, test_finder=finder)
- # XXX iirk
- doctest.DocTestCase._TestCase__exc_info = sys.exc_info
- except AttributeError:
- suite = SkippedSuite()
- # doctest may gork the builtins dictionnary
- # This happen to the "_" entry used by gettext
- old_builtins = builtins.__dict__.copy()
- try:
- return suite.run(result)
- finally:
- builtins.__dict__.clear()
- builtins.__dict__.update(old_builtins)
- run = __call__
-
- def test(self):
- """just there to trigger test execution"""
-
-MAILBOX = None
-
-class MockSMTP:
- """fake smtplib.SMTP"""
-
- def __init__(self, host, port):
- self.host = host
- self.port = port
- global MAILBOX
- self.reveived = MAILBOX = []
-
- def set_debuglevel(self, debuglevel):
- """ignore debug level"""
-
- def sendmail(self, fromaddr, toaddres, body):
- """push sent mail in the mailbox"""
- self.reveived.append((fromaddr, toaddres, body))
-
- def quit(self):
- """ignore quit"""
-
-
-class MockConfigParser(configparser.ConfigParser):
- """fake ConfigParser.ConfigParser"""
-
- def __init__(self, options):
- configparser.ConfigParser.__init__(self)
- for section, pairs in options.iteritems():
- self.add_section(section)
- for key, value in pairs.iteritems():
- self.set(section, key, value)
- def write(self, _):
- raise NotImplementedError()
-
-
-class MockConnection:
- """fake DB-API 2.0 connexion AND cursor (i.e. cursor() return self)"""
-
- def __init__(self, results):
- self.received = []
- self.states = []
- self.results = results
-
- def cursor(self):
- """Mock cursor method"""
- return self
- def execute(self, query, args=None):
- """Mock execute method"""
- self.received.append( (query, args) )
- def fetchone(self):
- """Mock fetchone method"""
- return self.results[0]
- def fetchall(self):
- """Mock fetchall method"""
- return self.results
- def commit(self):
- """Mock commiy method"""
- self.states.append( ('commit', len(self.received)) )
- def rollback(self):
- """Mock rollback method"""
- self.states.append( ('rollback', len(self.received)) )
- def close(self):
- """Mock close method"""
- pass
-
-
-def mock_object(**params):
- """creates an object using params to set attributes
- >>> option = mock_object(verbose=False, index=range(5))
- >>> option.verbose
- False
- >>> option.index
- [0, 1, 2, 3, 4]
- """
- return type('Mock', (), params)()
-
-
-def create_files(paths, chroot):
- """Creates directories and files found in .
-
- :param paths: list of relative paths to files or directories
- :param chroot: the root directory in which paths will be created
-
- >>> from os.path import isdir, isfile
- >>> isdir('/tmp/a')
- False
- >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp')
- >>> isdir('/tmp/a')
- True
- >>> isdir('/tmp/a/b/c')
- True
- >>> isfile('/tmp/a/b/c/d/e.py')
- True
- >>> isfile('/tmp/a/b/foo.py')
- True
- """
- dirs, files = set(), set()
- for path in paths:
- path = osp.join(chroot, path)
- filename = osp.basename(path)
- # path is a directory path
- if filename == '':
- dirs.add(path)
- # path is a filename path
- else:
- dirs.add(osp.dirname(path))
- files.add(path)
- for dirpath in dirs:
- if not osp.isdir(dirpath):
- os.makedirs(dirpath)
- for filepath in files:
- open(filepath, 'w').close()
-
-
-class AttrObject: # XXX cf mock_object
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
-
-def tag(*args, **kwargs):
- """descriptor adding tag to a function"""
- def desc(func):
- assert not hasattr(func, 'tags')
- func.tags = Tags(*args, **kwargs)
- return func
- return desc
-
-def require_version(version):
- """ Compare version of python interpreter to the given one. Skip the test
- if older.
- """
- def check_require_version(f):
- version_elements = version.split('.')
- try:
- compare = tuple([int(v) for v in version_elements])
- except ValueError:
- raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
- current = sys.version_info[:3]
- if current < compare:
- def new_f(self, *args, **kwargs):
- self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current])))
- new_f.__name__ = f.__name__
- return new_f
- else:
- return f
- return check_require_version
-
-def require_module(module):
- """ Check if the given module is loaded. Skip the test if not.
- """
- def check_require_module(f):
- try:
- __import__(module)
- return f
- except ImportError:
- def new_f(self, *args, **kwargs):
- self.skipTest('%s can not be imported.' % module)
- new_f.__name__ = f.__name__
- return new_f
- return check_require_module
-
diff --git a/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth b/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth
deleted file mode 100644
index d268b884..00000000
--- a/pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth
+++ /dev/null
@@ -1 +0,0 @@
-import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('logilab',));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('logilab', types.ModuleType('logilab'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/METADATA b/pymode/libs/logilab_common-1.0.2.dist-info/METADATA
deleted file mode 100644
index 9a00a498..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/METADATA
+++ /dev/null
@@ -1,169 +0,0 @@
-Metadata-Version: 2.0
-Name: logilab-common
-Version: 1.0.2
-Summary: collection of low-level Python packages and modules used by Logilab projects
-Home-page: http://www.logilab.org/project/logilab-common
-Author: Logilab
-Author-email: contact@logilab.fr
-License: LGPL
-Platform: UNKNOWN
-Classifier: Topic :: Utilities
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 3
-Requires-Dist: setuptools
-Requires-Dist: six (>=1.4.0)
-
-Logilab's common library
-========================
-
-What's this ?
--------------
-
-This package contains some modules used by different Logilab projects.
-
-It is released under the GNU Lesser General Public License.
-
-There is no documentation available yet but the source code should be clean and
-well documented.
-
-Designed to ease:
-
-* handling command line options and configuration files
-* writing interactive command line tools
-* manipulation of files and character strings
-* manipulation of common structures such as graph, tree, and pattern such as visitor
-* generating text and HTML reports
-* more...
-
-
-Installation
-------------
-
-Extract the tarball, jump into the created directory and run ::
-
- python setup.py install
-
-For installation options, see ::
-
- python setup.py install --help
-
-
-Provided modules
-----------------
-
-Here is a brief description of the available modules.
-
-Modules providing high-level features
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* `cache`, a cache implementation with a least recently used algorithm.
-
-* `changelog`, a tiny library to manipulate our simplified ChangeLog file format.
-
-* `clcommands`, high-level classes to define command line programs handling
- different subcommands. It is based on `configuration` to get easy command line
- / configuration file handling.
-
-* `configuration`, some classes to handle unified configuration from both
- command line (using optparse) and configuration file (using ConfigParser).
-
-* `proc`, interface to Linux /proc.
-
-* `umessage`, unicode email support.
-
-* `ureports`, micro-reports, a way to create simple reports using python objects
- without care of the final formatting. ReST and html formatters are provided.
-
-
-Modules providing low-level functions and structures
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* `compat`, provides a transparent compatibility layer between different python
- versions.
-
-* `date`, a set of date manipulation functions.
-
-* `daemon`, a daemon function and mix-in class to properly start an Unix daemon
- process.
-
-* `decorators`, function decorators such as cached, timed...
-
-* `deprecation`, decorator, metaclass & all to mark functions / classes as
- deprecated or moved
-
-* `fileutils`, some file / file path manipulation utilities.
-
-* `graph`, graph manipulations functions such as cycle detection, bases for dot
- file generation.
-
-* `modutils`, python module manipulation functions.
-
-* `shellutils`, some powerful shell like functions to replace shell scripts with
- python scripts.
-
-* `tasksqueue`, a prioritized tasks queue implementation.
-
-* `textutils`, some text manipulation functions (ansi colorization, line wrapping,
- rest support...).
-
-* `tree`, base class to represent tree structure, and some others to make it
- works with the visitor implementation (see below).
-
-* `visitor`, a generic visitor pattern implementation.
-
-
-Modules extending some standard modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* `debugger`, `pdb` customization.
-
-* `logging_ext`, extensions to `logging` module such as a colorized formatter
- and an easier initialization function.
-
-* `optik_ext`, defines some new option types (regexp, csv, color, date, etc.)
- for `optik` / `optparse`
-
-
-Modules extending some external modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-* `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive.
-
-* `vcgutils` , utilities functions to generate file readable with Georg Sander's
- vcg tool (Visualization of Compiler Graphs).
-
-
-To be deprecated modules
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Those `logilab.common` modules will much probably be deprecated in future
-versions:
-
-* `testlib`: use `unittest2`_ instead
-* `pytest`: use `discover`_ instead
-* `interface`: use `zope.interface`_ if you really want this
-* `table`, `xmlutils`: is that used?
-* `sphinxutils`: we won't go that way imo (i == syt)
-
-
-Comments, support, bug reports
-------------------------------
-
-Project page https://www.logilab.org/project/logilab-common
-
-Use the python-projects@lists.logilab.org mailing list.
-
-You can subscribe to this mailing list at
-https://lists.logilab.org/mailman/listinfo/python-projects
-
-Archives are available at
-https://lists.logilab.org/pipermail/python-projects/
-
-
-.. _Sphinx: http://sphinx.pocoo.org/
-.. _`unittest2`: http://pypi.python.org/pypi/unittest2
-.. _`discover`: http://pypi.python.org/pypi/discover
-.. _`zope.interface`: http://pypi.python.org/pypi/zope.interface
-
-
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/RECORD b/pymode/libs/logilab_common-1.0.2.dist-info/RECORD
deleted file mode 100644
index e6e4730a..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/RECORD
+++ /dev/null
@@ -1,87 +0,0 @@
-logilab_common-1.0.2-py2.7-nspkg.pth,sha256=ZY-Jf8tK2WQu_mjLvZuFpvpX9uwdpX3yDS1AuRncCZA,308
-logilab/common/__init__.py,sha256=UiR9rv7f7WsAnIHsxa3UApVCJGTzXbZoC-c4EQJpcvg,5390
-logilab/common/cache.py,sha256=wmY87WSoyERDhAlfIKKUipYavlZPpm3sGAQMpzbDHTM,3621
-logilab/common/changelog.py,sha256=Ea_4j22rWJJ33VSCj4Lz0pBGP0wP7LMP2Zo4DR7iZIo,8075
-logilab/common/clcommands.py,sha256=abMNAsB6ADT7Ns5MsxNtAMOlTQGJLCMO9MUkNYdsVG8,11237
-logilab/common/compat.py,sha256=rMGytWS1DCo35MdKUocU1LfLbZA0RyK79Gyu7lvd6Rg,2593
-logilab/common/configuration.py,sha256=s4rg7Qa1_4bpWlTg-bEaHYUcrgvuoDt75ZJgRnlFsME,42160
-logilab/common/daemon.py,sha256=Eqwo_oKjrHtS9SLrtSfeghRTCjqvveGho43s7vMkd7A,3337
-logilab/common/date.py,sha256=nnUN-4onEaWSR8r4PvtmJyn5ukfFzasjEcOGzEdrvqQ,11230
-logilab/common/debugger.py,sha256=Bw2-yI9KrvSgPLDksda4F8nuK_DvxnSCS-ymPSVc778,7094
-logilab/common/decorators.py,sha256=4DD3iNgEQPVz5hPp-SbbgD-ZObXhaeazGqKleyHdXaw,8868
-logilab/common/deprecation.py,sha256=MAxc_Ds9H_j6C7d4VQqMQPB1j-Ib8vy7iBWoQa8aRHs,7417
-logilab/common/fileutils.py,sha256=kCk_8odmAKnYPHPhUruuV-6og8N9kT8fplV-pvwwd4A,12738
-logilab/common/graph.py,sha256=GTSN-kP40EHjnHXk1vxO-56rEszo-esu1S3hf-SOddw,10247
-logilab/common/interface.py,sha256=dXl6kiuXSpefxauu7J6CUv0soe09wjT4_vXbeWQFgJ8,2593
-logilab/common/logging_ext.py,sha256=Yi8k2fGqr_tt-YApT1JjroNpXETxfj84HKmgTgO22Nw,6975
-logilab/common/modutils.py,sha256=w2LVy_vzhGoyBRrKivx0hqx8n326KrtTUezelEwDAcc,24002
-logilab/common/optik_ext.py,sha256=_aZgWKTKCC8_vYIpstNCOk8wewwZ4jfrpvXWrmPzn5Y,13451
-logilab/common/optparser.py,sha256=QgDoAyVoRy7U1fG9BSZ0O7LQsyNayo1HAelZaKlb4kY,3386
-logilab/common/proc.py,sha256=RGMlPuc11FfrIsqzqNFO3Q6buqt8dvMwXfXKXfwAHks,9352
-logilab/common/pytest.py,sha256=ac7hVpAb06TstSjPV586h1wW21Y__XH5bjrwX55dDOE,46736
-logilab/common/registry.py,sha256=0qIJfNJiqM1HkI-twKHfXiTPU5HKSGRrS-P0Dsj56qw,41550
-logilab/common/shellutils.py,sha256=ZFZ19eX0TCcDrsbOWiy7sr1oqnhQsLixv9n8HakcJiM,14363
-logilab/common/sphinx_ext.py,sha256=pbKN0ObMDY_jy9ehP_7NOKMo40LbQLjf0xntmxHnGr8,3329
-logilab/common/sphinxutils.py,sha256=piY1R04GNR-i1mIb4PRhbGbmbDZPhDsn1FBAiA_Bbrg,4444
-logilab/common/table.py,sha256=5NEx4Ju-jk2CV6W-jxTpOoYArt2BlRpaTZZUBGwu1kg,31408
-logilab/common/tasksqueue.py,sha256=wFE0C0FiuHGBoCnvU-_Kno1eM_Em6yYxYvND6emRN34,2987
-logilab/common/testlib.py,sha256=2Ra9OPs5QpQv7hoZod3M2yYCUdtqSaN3LAvVyiQyA1k,50506
-logilab/common/textutils.py,sha256=TgPGqkN3JsJuR7VxnkoWaOWfkwHiVNB9gpId_3S2xO4,17277
-logilab/common/tree.py,sha256=Y-sa_pfI17cCb-vkyJMaBW3XKVNrreexBgBMPpQJDy0,10606
-logilab/common/umessage.py,sha256=2BuxspHkPEXhlf-XVDye25Mt0RUELneay-K1KNLcS9c,6551
-logilab/common/urllib2ext.py,sha256=FOpxVrbAPtY_6ssq3Qui3zxzckAqLJe9kGkp8tLR0Ic,3416
-logilab/common/vcgutils.py,sha256=tNfi6jxZ4xdUvrjw1cKOodecRlcD0U3MQvTb5HrY5fE,7673
-logilab/common/visitor.py,sha256=5Oc9Y88Kx4wiZ6JAFYFeXwKrMS8jNph9ENVWG3oim1E,3444
-logilab/common/xmlutils.py,sha256=2e4FM-X1PLKBaTG6etLHsAIrtZQiDEA9U7WqM3KjNks,2273
-logilab/common/ureports/__init__.py,sha256=b3_8f4mAm6T3O_-klutleWZ99XjlR-AELfuLEyCbzQ8,6113
-logilab/common/ureports/docbook_writer.py,sha256=KSkIk0W4C4E6DR-Ul_Y9jgnd4_tgVVu15LnU8p2RoeM,5706
-logilab/common/ureports/html_writer.py,sha256=Ee_x9rXjx2NZp290e-0C7nu7VYuKpkCsrl79m4HLI5g,4956
-logilab/common/ureports/nodes.py,sha256=t2NQiL6LQV94D8ugitklVnZRVbz6kP5QkUrl8zGsmMQ,5838
-logilab/common/ureports/text_writer.py,sha256=cMBHbA36_1NrKKnx5LBKczGQmBRg4aObkpr1d581ORU,5212
-../../bin/pytest,sha256=vkYcOC21mDzGBrz4-ajilr8TGxa9tRabxQhyYyXeEDE,124
-logilab_common-1.0.2.dist-info/DESCRIPTION.rst,sha256=bMLyPRBRS-tSzW5zhchxcLlPbYHRv0XEMqs6Oln2z5U,4426
-logilab_common-1.0.2.dist-info/METADATA,sha256=3_iFYhN84fXSjkdjzHv3grHBY2xIZVLSkmuBeTSnLQE,4934
-logilab_common-1.0.2.dist-info/metadata.json,sha256=dTwpZUieC7dZFkKiNdtgVExm2w1B44k4ZDSaCP3ASXo,742
-logilab_common-1.0.2.dist-info/namespace_packages.txt,sha256=xXemaIbd-285ANf3yiCDkMHRTZSuLvlqL_MTLEJKMuk,8
-logilab_common-1.0.2.dist-info/RECORD,,
-logilab_common-1.0.2.dist-info/top_level.txt,sha256=xXemaIbd-285ANf3yiCDkMHRTZSuLvlqL_MTLEJKMuk,8
-logilab_common-1.0.2.dist-info/WHEEL,sha256=54bVun1KfEBTJ68SHUmbxNPj80VxlQ0sHi4gZdGZXEY,92
-logilab/common/logging_ext.pyc,,
-logilab/common/date.pyc,,
-logilab/common/modutils.pyc,,
-logilab/common/ureports/__init__.pyc,,
-logilab/common/sphinxutils.pyc,,
-logilab/common/ureports/text_writer.pyc,,
-logilab/common/optik_ext.pyc,,
-logilab/common/visitor.pyc,,
-logilab/common/debugger.pyc,,
-logilab/common/compat.pyc,,
-logilab/common/decorators.pyc,,
-logilab/common/textutils.pyc,,
-logilab/common/ureports/docbook_writer.pyc,,
-logilab/common/shellutils.pyc,,
-logilab/common/changelog.pyc,,
-logilab/common/interface.pyc,,
-logilab/common/ureports/nodes.pyc,,
-logilab/common/pytest.pyc,,
-logilab/common/sphinx_ext.pyc,,
-logilab/common/xmlutils.pyc,,
-logilab/common/__init__.pyc,,
-logilab/common/tree.pyc,,
-logilab/common/umessage.pyc,,
-logilab/common/registry.pyc,,
-logilab/common/proc.pyc,,
-logilab/common/urllib2ext.pyc,,
-logilab/common/testlib.pyc,,
-logilab/common/clcommands.pyc,,
-logilab/common/ureports/html_writer.pyc,,
-logilab/common/vcgutils.pyc,,
-logilab/common/daemon.pyc,,
-logilab/common/table.pyc,,
-logilab/common/optparser.pyc,,
-logilab/common/deprecation.pyc,,
-logilab/common/tasksqueue.pyc,,
-logilab/common/fileutils.pyc,,
-logilab/common/graph.pyc,,
-logilab/common/cache.pyc,,
-logilab/common/configuration.pyc,,
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL b/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL
deleted file mode 100644
index 45a0cd88..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.24.0)
-Root-Is-Purelib: true
-Tag: py2-none-any
-
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json b/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json
deleted file mode 100644
index 54212666..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/metadata.json
+++ /dev/null
@@ -1 +0,0 @@
-{"license": "LGPL", "name": "logilab-common", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "test_requires": [{"requires": ["pytz"]}], "summary": "collection of low-level Python packages and modules used by Logilab projects", "run_requires": [{"requires": ["setuptools", "six (>=1.4.0)"]}], "version": "1.0.2", "extensions": {"python.details": {"project_urls": {"Home": "http://www.logilab.org/project/logilab-common"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "contact@logilab.fr", "name": "Logilab"}]}}, "classifiers": ["Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3"], "extras": []}
\ No newline at end of file
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt b/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt
deleted file mode 100644
index 3ac267a9..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt
+++ /dev/null
@@ -1 +0,0 @@
-logilab
diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt b/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt
deleted file mode 100644
index 3ac267a9..00000000
--- a/pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-logilab
diff --git a/pymode/libs/pylint b/pymode/libs/pylint
new file mode 120000
index 00000000..0d144c06
--- /dev/null
+++ b/pymode/libs/pylint
@@ -0,0 +1 @@
+../../submodules/pylint/pylint
\ No newline at end of file
diff --git a/pymode/libs/pylint/__init__.py b/pymode/libs/pylint/__init__.py
deleted file mode 100644
index ba882ea6..00000000
--- a/pymode/libs/pylint/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-import sys
-
-from .__pkginfo__ import version as __version__
-
-def run_pylint():
- """run pylint"""
- from pylint.lint import Run
- Run(sys.argv[1:])
-
-
-def run_epylint():
- """run pylint"""
- from pylint.epylint import Run
- Run()
-
-def run_pyreverse():
- """run pyreverse"""
- from pylint.pyreverse.main import Run
- Run(sys.argv[1:])
-
-def run_symilar():
- """run symilar"""
- from pylint.checkers.similar import Run
- Run(sys.argv[1:])
diff --git a/pymode/libs/pylint/__main__.py b/pymode/libs/pylint/__main__.py
deleted file mode 100644
index f1ecf1b9..00000000
--- a/pymode/libs/pylint/__main__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-#!/usr/bin/env python
-import pylint
-pylint.run_pylint()
diff --git a/pymode/libs/pylint/__pkginfo__.py b/pymode/libs/pylint/__pkginfo__.py
deleted file mode 100644
index 099da4ba..00000000
--- a/pymode/libs/pylint/__pkginfo__.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-# pylint: disable=W0622,C0103
-"""pylint packaging information"""
-
-from __future__ import absolute_import
-
-from os.path import join
-from sys import version_info as py_version
-
-from pkg_resources import parse_version
-from setuptools import __version__ as setuptools_version
-
-modname = distname = 'pylint'
-
-numversion = (1, 7, 2)
-version = '.'.join([str(num) for num in numversion])
-
-install_requires = [
- 'astroid>=1.5.1',
- 'six',
- 'isort >= 4.2.5',
- 'mccabe',
-]
-
-dependency_links = []
-
-extras_require = {}
-extras_require[':sys_platform=="win32"'] = ['colorama']
-
-
-def has_environment_marker_range_operators_support():
- """Code extracted from 'pytest/setup.py'
- https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31
- The first known release to support environment marker with range operators
- it is 17.1, see: https://setuptools.readthedocs.io/en/latest/history.html#id113
- """
- return parse_version(setuptools_version) >= parse_version('17.1')
-
-
-if has_environment_marker_range_operators_support():
- extras_require[':python_version=="2.7"'] = ['configparser', 'backports.functools_lru_cache']
- extras_require[':python_version<"3.4"'] = ['singledispatch']
-else:
- if (py_version.major, py_version.minor) == (2, 7):
- install_requires.extend(['configparser', 'backports.functools_lru_cache'])
- if py_version < (3, 4):
- install_requires.extend(['singledispatch'])
-
-
-license = 'GPL'
-description = "python code static checker"
-web = 'https://github.com/PyCQA/pylint'
-mailinglist = "mailto:code-quality@python.org"
-author = 'Python Code Quality Authority'
-author_email = 'code-quality@python.org'
-
-classifiers = ['Development Status :: 4 - Beta',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: GNU General Public License (GPL)',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 3',
- 'Topic :: Software Development :: Debuggers',
- 'Topic :: Software Development :: Quality Assurance',
- 'Topic :: Software Development :: Testing'
- ]
-
-
-long_desc = """\
- Pylint is a Python source code analyzer which looks for programming
- errors, helps enforcing a coding standard and sniffs for some code
- smells (as defined in Martin Fowler's Refactoring book)
- .
- Pylint can be seen as another PyChecker since nearly all tests you
- can do with PyChecker can also be done with Pylint. However, Pylint
- offers some more features, like checking length of lines of code,
- checking if variable names are well-formed according to your coding
- standard, or checking if declared interfaces are truly implemented,
- and much more.
- .
- Additionally, it is possible to write plugins to add your own checks.
- .
- Pylint is shipped with "pyreverse" (UML diagram generator)
- and "symilar" (an independent similarities checker)."""
-
-scripts = [join('bin', filename)
- for filename in ('pylint', "symilar", "epylint",
- "pyreverse")]
-
-include_dirs = [join('pylint', 'test')]
diff --git a/pymode/libs/pylint/checkers/__init__.py b/pymode/libs/pylint/checkers/__init__.py
deleted file mode 100644
index 78921870..00000000
--- a/pymode/libs/pylint/checkers/__init__.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""utilities methods and classes for checkers
-
-Base id of standard checkers (used in msg and report ids):
-01: base
-02: classes
-03: format
-04: import
-05: misc
-06: variables
-07: exceptions
-08: similar
-09: design_analysis
-10: newstyle
-11: typecheck
-12: logging
-13: string_format
-14: string_constant
-15: stdlib
-16: python3
-17: refactoring
-18-50: not yet used: reserved for future internal checkers.
-51-99: perhaps used: reserved for external checkers
-
-The raw_metrics checker has no number associated since it doesn't emit any
-messages nor reports. XXX not true, emit a 07 report !
-
-"""
-
-import sys
-import tokenize
-import warnings
-
-from pylint.config import OptionsProviderMixIn
-from pylint.reporters import diff_string
-from pylint.utils import register_plugins
-from pylint.interfaces import UNDEFINED
-
-
-def table_lines_from_stats(stats, old_stats, columns):
- """get values listed in from and ,
- and return a formated list of values, designed to be given to a
- ureport.Table object
- """
- lines = []
- for m_type in columns:
- new = stats[m_type]
- format = str # pylint: disable=redefined-builtin
- if isinstance(new, float):
- format = lambda num: '%.3f' % num
- old = old_stats.get(m_type)
- if old is not None:
- diff_str = diff_string(old, new)
- old = format(old)
- else:
- old, diff_str = 'NC', 'NC'
- lines += (m_type.replace('_', ' '), format(new), old, diff_str)
- return lines
-
-
-class BaseChecker(OptionsProviderMixIn):
- """base class for checkers"""
- # checker name (you may reuse an existing one)
- name = None
- # options level (0 will be displaying in --help, 1 in --long-help)
- level = 1
- # ordered list of options to control the ckecker behaviour
- options = ()
- # messages issued by this checker
- msgs = {}
- # reports issued by this checker
- reports = ()
- # mark this checker as enabled or not.
- enabled = True
-
- def __init__(self, linter=None):
- """checker instances should have the linter as argument
-
- linter is an object implementing ILinter
- """
- self.name = self.name.lower()
- OptionsProviderMixIn.__init__(self)
- self.linter = linter
-
- def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
- """add a message of a given type"""
- self.linter.add_message(msg_id, line, node, args, confidence)
-
- # dummy methods implementing the IChecker interface
-
- def open(self):
- """called before visiting project (i.e set of modules)"""
-
- def close(self):
- """called after visiting project (i.e set of modules)"""
-
-
-class BaseTokenChecker(BaseChecker):
- """Base class for checkers that want to have access to the token stream."""
-
- def process_tokens(self, tokens):
- """Should be overridden by subclasses."""
- raise NotImplementedError()
-
-
-def initialize(linter):
- """initialize linter with checkers in this package """
- register_plugins(linter, __path__[0])
-
-__all__ = ('BaseChecker', 'initialize')
diff --git a/pymode/libs/pylint/checkers/async.py b/pymode/libs/pylint/checkers/async.py
deleted file mode 100644
index 6d759b24..00000000
--- a/pymode/libs/pylint/checkers/async.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checker for anything related to the async protocol (PEP 492)."""
-
-import sys
-
-import astroid
-from astroid import exceptions
-
-from pylint import checkers
-from pylint.checkers import utils as checker_utils
-from pylint import interfaces
-from pylint import utils
-
-
-class AsyncChecker(checkers.BaseChecker):
- __implements__ = interfaces.IAstroidChecker
- name = 'async'
- msgs = {
- 'E1700': ('Yield inside async function',
- 'yield-inside-async-function',
- 'Used when an `yield` or `yield from` statement is '
- 'found inside an async function.',
- {'minversion': (3, 5)}),
- 'E1701': ("Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
- 'not-async-context-manager',
- 'Used when an async context manager is used with an object '
- 'that does not implement the async context management protocol.',
- {'minversion': (3, 5)}),
- }
-
- def open(self):
- self._ignore_mixin_members = utils.get_global_option(self, 'ignore-mixin-members')
-
- @checker_utils.check_messages('yield-inside-async-function')
- def visit_asyncfunctiondef(self, node):
- for child in node.nodes_of_class(astroid.Yield):
- if child.scope() is node and (sys.version_info[:2] == (3, 5) or
- isinstance(child, astroid.YieldFrom)):
- self.add_message('yield-inside-async-function', node=child)
-
- @checker_utils.check_messages('not-async-context-manager')
- def visit_asyncwith(self, node):
- for ctx_mgr, _ in node.items:
- infered = checker_utils.safe_infer(ctx_mgr)
- if infered is None or infered is astroid.YES:
- continue
-
- if isinstance(infered, astroid.Instance):
- try:
- infered.getattr('__aenter__')
- infered.getattr('__aexit__')
- except exceptions.NotFoundError:
- if isinstance(infered, astroid.Instance):
- # If we do not know the bases of this class,
- # just skip it.
- if not checker_utils.has_known_bases(infered):
- continue
- # Just ignore mixin classes.
- if self._ignore_mixin_members:
- if infered.name[-5:].lower() == 'mixin':
- continue
- else:
- continue
-
- self.add_message('not-async-context-manager',
- node=node, args=(infered.name, ))
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(AsyncChecker(linter))
diff --git a/pymode/libs/pylint/checkers/base.py b/pymode/libs/pylint/checkers/base.py
deleted file mode 100644
index a0d8c431..00000000
--- a/pymode/libs/pylint/checkers/base.py
+++ /dev/null
@@ -1,1660 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2012-2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-# Copyright (c) 2014 Brett Cannon
-# Copyright (c) 2015 Radu Ciorba
-# Copyright (c) 2015 Michael Kefeder
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2015 Stephane Wirtel
-# Copyright (c) 2015 Nick Bastin
-# Copyright (c) 2016 Alex Jurkiewicz
-# Copyright (c) 2016 Yannack
-# Copyright (c) 2016 Laura Médioni
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""basic checker for Python code"""
-
-import collections
-import itertools
-import sys
-import re
-
-import six
-from six.moves import zip # pylint: disable=redefined-builtin
-
-import astroid
-import astroid.bases
-import astroid.scoped_nodes
-
-from pylint import checkers
-from pylint import exceptions
-from pylint import interfaces
-from pylint.checkers import utils
-from pylint import reporters
-from pylint.reporters.ureports import nodes as reporter_nodes
-
-
-# regex for class/function/variable/constant name
-CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
-MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
-CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$')
-COMP_VAR_RGX = re.compile('[A-Za-z_][A-Za-z0-9_]*$')
-DEFAULT_NAME_RGX = re.compile('(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$')
-CLASS_ATTRIBUTE_RGX = re.compile(r'([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$')
-# do not require a doc string on private/system methods
-NO_REQUIRED_DOC_RGX = re.compile('^_')
-REVERSED_PROTOCOL_METHOD = '__reversed__'
-SEQUENCE_PROTOCOL_METHODS = ('__getitem__', '__len__')
-REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS,
- (REVERSED_PROTOCOL_METHOD, ))
-TYPECHECK_COMPARISON_OPERATORS = frozenset(('is', 'is not', '==',
- '!=', 'in', 'not in'))
-LITERAL_NODE_TYPES = (astroid.Const, astroid.Dict, astroid.List, astroid.Set)
-UNITTEST_CASE = 'unittest.case'
-BUILTINS = six.moves.builtins.__name__
-TYPE_QNAME = "%s.type" % BUILTINS
-PY33 = sys.version_info >= (3, 3)
-PY3K = sys.version_info >= (3, 0)
-PY35 = sys.version_info >= (3, 5)
-
-# Name categories that are always consistent with all naming conventions.
-EXEMPT_NAME_CATEGORIES = {'exempt', 'ignore'}
-
-# A mapping from builtin-qname -> symbol, to be used when generating messages
-# about dangerous default values as arguments
-DEFAULT_ARGUMENT_SYMBOLS = dict(
- zip(['.'.join([BUILTINS, x]) for x in ('set', 'dict', 'list')],
- ['set()', '{}', '[]'])
-)
-REVERSED_COMPS = {'<': '>', '<=': '>=', '>': '<', '>=': '<='}
-
-
-def _redefines_import(node):
- """ Detect that the given node (AssName) is inside an
- exception handler and redefines an import from the tryexcept body.
- Returns True if the node redefines an import, False otherwise.
- """
- current = node
- while current and not isinstance(current.parent, astroid.ExceptHandler):
- current = current.parent
- if not current or not utils.error_of_type(current.parent, ImportError):
- return False
- try_block = current.parent.parent
- for import_node in try_block.nodes_of_class((astroid.ImportFrom, astroid.Import)):
- for name, alias in import_node.names:
- if alias:
- if alias == node.name:
- return True
- elif name == node.name:
- return True
- return False
-
-
-def in_loop(node):
- """return True if the node is inside a kind of for loop"""
- parent = node.parent
- while parent is not None:
- if isinstance(parent, (astroid.For, astroid.ListComp, astroid.SetComp,
- astroid.DictComp, astroid.GeneratorExp)):
- return True
- parent = parent.parent
- return False
-
-
-def in_nested_list(nested_list, obj):
- """return true if the object is an element of or of a nested
- list
- """
- for elmt in nested_list:
- if isinstance(elmt, (list, tuple)):
- if in_nested_list(elmt, obj):
- return True
- elif elmt == obj:
- return True
- return False
-
-
-def _loop_exits_early(loop):
- """Returns true if a loop has a break statement in its body."""
- loop_nodes = (astroid.For, astroid.While)
- # Loop over body explicitly to avoid matching break statements
- # in orelse.
- for child in loop.body:
- if isinstance(child, loop_nodes):
- # break statement may be in orelse of child loop.
- # pylint: disable=superfluous-parens
- for orelse in (child.orelse or ()):
- for _ in orelse.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
- return True
- continue
- for _ in child.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
- return True
- return False
-
-
-def _is_multi_naming_match(match, node_type, confidence):
- return (match is not None and
- match.lastgroup is not None and
- match.lastgroup not in EXEMPT_NAME_CATEGORIES
- and (node_type != 'method' or confidence != interfaces.INFERENCE_FAILURE))
-
-
-if sys.version_info < (3, 0):
- BUILTIN_PROPERTY = '__builtin__.property'
-else:
- BUILTIN_PROPERTY = 'builtins.property'
-
-
-def _get_properties(config):
- """Returns a tuple of property classes and names.
-
- Property classes are fully qualified, such as 'abc.abstractproperty' and
- property names are the actual names, such as 'abstract_property'.
- """
- property_classes = set((BUILTIN_PROPERTY,))
- property_names = set() # Not returning 'property', it has its own check.
- if config is not None:
- property_classes.update(config.property_classes)
- property_names.update((prop.rsplit('.', 1)[-1]
- for prop in config.property_classes))
- return property_classes, property_names
-
-
-def _determine_function_name_type(node, config=None):
- """Determine the name type whose regex the a function's name should match.
-
- :param node: A function node.
- :type node: astroid.node_classes.NodeNG
- :param config: Configuration from which to pull additional property classes.
- :type config: :class:`optparse.Values`
-
- :returns: One of ('function', 'method', 'attr')
- :rtype: str
- """
- property_classes, property_names = _get_properties(config)
- if not node.is_method():
- return 'function'
- if node.decorators:
- decorators = node.decorators.nodes
- else:
- decorators = []
- for decorator in decorators:
- # If the function is a property (decorated with @property
- # or @abc.abstractproperty), the name type is 'attr'.
- if (isinstance(decorator, astroid.Name) or
- (isinstance(decorator, astroid.Attribute) and
- decorator.attrname in property_names)):
- infered = utils.safe_infer(decorator)
- if infered and infered.qname() in property_classes:
- return 'attr'
- # If the function is decorated using the prop_method.{setter,getter}
- # form, treat it like an attribute as well.
- elif (isinstance(decorator, astroid.Attribute) and
- decorator.attrname in ('setter', 'deleter')):
- return 'attr'
- return 'method'
-
-
-def _has_abstract_methods(node):
- """
- Determine if the given `node` has abstract methods.
-
- The methods should be made abstract by decorating them
- with `abc` decorators.
- """
- return len(utils.unimplemented_abstract_methods(node)) > 0
-
-
-def report_by_type_stats(sect, stats, old_stats):
- """make a report of
-
- * percentage of different types documented
- * percentage of different types with a bad name
- """
- # percentage of different types documented and/or with a bad name
- nice_stats = {}
- for node_type in ('module', 'class', 'method', 'function'):
- try:
- total = stats[node_type]
- except KeyError:
- raise exceptions.EmptyReportError()
- nice_stats[node_type] = {}
- if total != 0:
- try:
- documented = total - stats['undocumented_'+node_type]
- percent = (documented * 100.) / total
- nice_stats[node_type]['percent_documented'] = '%.2f' % percent
- except KeyError:
- nice_stats[node_type]['percent_documented'] = 'NC'
- try:
- percent = (stats['badname_'+node_type] * 100.) / total
- nice_stats[node_type]['percent_badname'] = '%.2f' % percent
- except KeyError:
- nice_stats[node_type]['percent_badname'] = 'NC'
- lines = ('type', 'number', 'old number', 'difference',
- '%documented', '%badname')
- for node_type in ('module', 'class', 'method', 'function'):
- new = stats[node_type]
- old = old_stats.get(node_type, None)
- if old is not None:
- diff_str = reporters.diff_string(old, new)
- else:
- old, diff_str = 'NC', 'NC'
- lines += (node_type, str(new), str(old), diff_str,
- nice_stats[node_type].get('percent_documented', '0'),
- nice_stats[node_type].get('percent_badname', '0'))
- sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
-
-
-def redefined_by_decorator(node):
- """return True if the object is a method redefined via decorator.
-
- For example:
- @property
- def x(self): return self._x
- @x.setter
- def x(self, value): self._x = value
- """
- if node.decorators:
- for decorator in node.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute) and
- getattr(decorator.expr, 'name', None) == node.name):
- return True
- return False
-
-
-class _BasicChecker(checkers.BaseChecker):
- __implements__ = interfaces.IAstroidChecker
- name = 'basic'
-
-
-class BasicErrorChecker(_BasicChecker):
- msgs = {
- 'E0100': ('__init__ method is a generator',
- 'init-is-generator',
- 'Used when the special class method __init__ is turned into a '
- 'generator by a yield in its body.'),
- 'E0101': ('Explicit return in __init__',
- 'return-in-init',
- 'Used when the special class method __init__ has an explicit '
- 'return value.'),
- 'E0102': ('%s already defined line %s',
- 'function-redefined',
- 'Used when a function / class / method is redefined.'),
- 'E0103': ('%r not properly in loop',
- 'not-in-loop',
- 'Used when break or continue keywords are used outside a loop.'),
- 'E0104': ('Return outside function',
- 'return-outside-function',
- 'Used when a "return" statement is found outside a function or '
- 'method.'),
- 'E0105': ('Yield outside function',
- 'yield-outside-function',
- 'Used when a "yield" statement is found outside a function or '
- 'method.'),
- 'E0106': ('Return with argument inside generator',
- 'return-arg-in-generator',
- 'Used when a "return" statement with an argument is found '
- 'outside in a generator function or method (e.g. with some '
- '"yield" statements).',
- {'maxversion': (3, 3)}),
- 'E0107': ("Use of the non-existent %s operator",
- 'nonexistent-operator',
- "Used when you attempt to use the C-style pre-increment or"
- "pre-decrement operator -- and ++, which doesn't exist in Python."),
- 'E0108': ('Duplicate argument name %s in function definition',
- 'duplicate-argument-name',
- 'Duplicate argument names in function definitions are syntax'
- ' errors.'),
- 'E0110': ('Abstract class %r with abstract methods instantiated',
- 'abstract-class-instantiated',
- 'Used when an abstract class with `abc.ABCMeta` as metaclass '
- 'has abstract methods and is instantiated.'),
- 'W0120': ('Else clause on loop without a break statement',
- 'useless-else-on-loop',
- 'Loops should only have an else clause if they can exit early '
- 'with a break statement, otherwise the statements under else '
- 'should be on the same scope as the loop itself.'),
- 'E0112': ('More than one starred expression in assignment',
- 'too-many-star-expressions',
- 'Emitted when there are more than one starred '
- 'expressions (`*x`) in an assignment. This is a SyntaxError.',
- {'minversion': (3, 0)}),
- 'E0113': ('Starred assignment target must be in a list or tuple',
- 'invalid-star-assignment-target',
- 'Emitted when a star expression is used as a starred '
- 'assignment target.',
- {'minversion': (3, 0)}),
- 'E0114': ('Can use starred expression only in assignment target',
- 'star-needs-assignment-target',
- 'Emitted when a star expression is not used in an '
- 'assignment target.',
- {'minversion': (3, 0)}),
- 'E0115': ('Name %r is nonlocal and global',
- 'nonlocal-and-global',
- 'Emitted when a name is both nonlocal and global.',
- {'minversion': (3, 0)}),
- 'E0116': ("'continue' not supported inside 'finally' clause",
- 'continue-in-finally',
- 'Emitted when the `continue` keyword is found '
- 'inside a finally clause, which is a SyntaxError.'),
- 'E0117': ("nonlocal name %s found without binding",
- 'nonlocal-without-binding',
- 'Emitted when a nonlocal variable does not have an attached '
- 'name somewhere in the parent scopes',
- {'minversion': (3, 0)}),
- 'E0118': ("Name %r is used prior to global declaration",
- 'used-prior-global-declaration',
- 'Emitted when a name is used prior a global declaration, '
- 'which results in an error since Python 3.6.',
- {'minversion': (3, 6)}),
- }
-
- @utils.check_messages('function-redefined')
- def visit_classdef(self, node):
- self._check_redefinition('class', node)
-
- @utils.check_messages('too-many-star-expressions',
- 'invalid-star-assignment-target')
- def visit_assign(self, node):
- starred = list(node.targets[0].nodes_of_class(astroid.Starred))
- if len(starred) > 1:
- self.add_message('too-many-star-expressions', node=node)
-
- # Check *a = b
- if isinstance(node.targets[0], astroid.Starred):
- self.add_message('invalid-star-assignment-target', node=node)
-
- @utils.check_messages('star-needs-assignment-target')
- def visit_starred(self, node):
- """Check that a Starred expression is used in an assignment target."""
- if isinstance(node.parent, astroid.Call):
- # f(*args) is converted to Call(args=[Starred]), so ignore
- # them for this check.
- return
- if PY35 and isinstance(node.parent,
- (astroid.List, astroid.Tuple,
- astroid.Set, astroid.Dict)):
- # PEP 448 unpacking.
- return
-
- stmt = node.statement()
- if not isinstance(stmt, astroid.Assign):
- return
-
- if stmt.value is node or stmt.value.parent_of(node):
- self.add_message('star-needs-assignment-target', node=node)
-
- @utils.check_messages('init-is-generator', 'return-in-init',
- 'function-redefined', 'return-arg-in-generator',
- 'duplicate-argument-name', 'nonlocal-and-global',
- 'used-prior-global-declaration')
- def visit_functiondef(self, node):
- self._check_nonlocal_and_global(node)
- self._check_name_used_prior_global(node)
- if (not redefined_by_decorator(node) and
- not utils.is_registered_in_singledispatch_function(node)):
- self._check_redefinition(node.is_method() and 'method' or 'function', node)
- # checks for max returns, branch, return in __init__
- returns = node.nodes_of_class(astroid.Return,
- skip_klass=(astroid.FunctionDef,
- astroid.ClassDef))
- if node.is_method() and node.name == '__init__':
- if node.is_generator():
- self.add_message('init-is-generator', node=node)
- else:
- values = [r.value for r in returns]
- # Are we returning anything but None from constructors
- if any(v for v in values if not utils.is_none(v)):
- self.add_message('return-in-init', node=node)
- elif node.is_generator():
- # make sure we don't mix non-None returns and yields
- if not PY33:
- for retnode in returns:
- if isinstance(retnode.value, astroid.Const) and \
- retnode.value.value is not None:
- self.add_message('return-arg-in-generator', node=node,
- line=retnode.fromlineno)
- # Check for duplicate names
- args = set()
- for name in node.argnames():
- if name in args:
- self.add_message('duplicate-argument-name', node=node, args=(name,))
- else:
- args.add(name)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_name_used_prior_global(self, node):
-
- scope_globals = {
- name: child
- for child in node.nodes_of_class(astroid.Global)
- for name in child.names
- if child.scope() is node
- }
-
- for node_name in node.nodes_of_class(astroid.Name):
- if node_name.scope() is not node:
- continue
-
- name = node_name.name
- corresponding_global = scope_globals.get(name)
- if not corresponding_global:
- continue
-
- global_lineno = corresponding_global.fromlineno
- if global_lineno and global_lineno > node_name.fromlineno:
- self.add_message('used-prior-global-declaration',
- node=node_name, args=(name, ))
-
- def _check_nonlocal_and_global(self, node):
- """Check that a name is both nonlocal and global."""
- def same_scope(current):
- return current.scope() is node
-
- from_iter = itertools.chain.from_iterable
- nonlocals = set(from_iter(
- child.names for child in node.nodes_of_class(astroid.Nonlocal)
- if same_scope(child)))
- global_vars = set(from_iter(
- child.names for child in node.nodes_of_class(astroid.Global)
- if same_scope(child)))
- for name in nonlocals.intersection(global_vars):
- self.add_message('nonlocal-and-global',
- args=(name, ), node=node)
-
- @utils.check_messages('return-outside-function')
- def visit_return(self, node):
- if not isinstance(node.frame(), astroid.FunctionDef):
- self.add_message('return-outside-function', node=node)
-
- @utils.check_messages('yield-outside-function')
- def visit_yield(self, node):
- self._check_yield_outside_func(node)
-
- @utils.check_messages('yield-outside-function')
- def visit_yieldfrom(self, node):
- self._check_yield_outside_func(node)
-
- @utils.check_messages('not-in-loop', 'continue-in-finally')
- def visit_continue(self, node):
- self._check_in_loop(node, 'continue')
-
- @utils.check_messages('not-in-loop')
- def visit_break(self, node):
- self._check_in_loop(node, 'break')
-
- @utils.check_messages('useless-else-on-loop')
- def visit_for(self, node):
- self._check_else_on_loop(node)
-
- @utils.check_messages('useless-else-on-loop')
- def visit_while(self, node):
- self._check_else_on_loop(node)
-
- @utils.check_messages('nonexistent-operator')
- def visit_unaryop(self, node):
- """check use of the non-existent ++ and -- operator operator"""
- if ((node.op in '+-') and
- isinstance(node.operand, astroid.UnaryOp) and
- (node.operand.op == node.op)):
- self.add_message('nonexistent-operator', node=node, args=node.op*2)
-
- def _check_nonlocal_without_binding(self, node, name):
- current_scope = node.scope()
- while True:
- if current_scope.parent is None:
- break
-
- if not isinstance(current_scope, astroid.FunctionDef):
- self.add_message('nonlocal-without-binding', args=(name, ),
- node=node)
- return
- else:
- if name not in current_scope.locals:
- current_scope = current_scope.parent.scope()
- continue
- else:
- # Okay, found it.
- return
-
- self.add_message('nonlocal-without-binding', args=(name, ), node=node)
-
- @utils.check_messages('nonlocal-without-binding')
- def visit_nonlocal(self, node):
- for name in node.names:
- self._check_nonlocal_without_binding(node, name)
-
- @utils.check_messages('abstract-class-instantiated')
- def visit_call(self, node):
- """ Check instantiating abstract class with
- abc.ABCMeta as metaclass.
- """
- try:
- infered = next(node.func.infer())
- except astroid.InferenceError:
- return
-
- if not isinstance(infered, astroid.ClassDef):
- return
-
- klass = utils.node_frame_class(node)
- if klass is infered:
- # Don't emit the warning if the class is instantiated
- # in its own body or if the call is not an instance
- # creation. If the class is instantiated into its own
- # body, we're expecting that it knows what it is doing.
- return
-
- # __init__ was called
- metaclass = infered.metaclass()
- abstract_methods = _has_abstract_methods(infered)
- if metaclass is None:
- # Python 3.4 has `abc.ABC`, which won't be detected
- # by ClassNode.metaclass()
- for ancestor in infered.ancestors():
- if ancestor.qname() == 'abc.ABC' and abstract_methods:
- self.add_message('abstract-class-instantiated',
- args=(infered.name, ),
- node=node)
- break
- return
- if metaclass.qname() == 'abc.ABCMeta' and abstract_methods:
- self.add_message('abstract-class-instantiated',
- args=(infered.name, ),
- node=node)
-
- def _check_yield_outside_func(self, node):
- if not isinstance(node.frame(), (astroid.FunctionDef, astroid.Lambda)):
- self.add_message('yield-outside-function', node=node)
-
- def _check_else_on_loop(self, node):
- """Check that any loop with an else clause has a break statement."""
- if node.orelse and not _loop_exits_early(node):
- self.add_message('useless-else-on-loop', node=node,
- # This is not optimal, but the line previous
- # to the first statement in the else clause
- # will usually be the one that contains the else:.
- line=node.orelse[0].lineno - 1)
-
- def _check_in_loop(self, node, node_name):
- """check that a node is inside a for or while loop"""
- _node = node.parent
- while _node:
- if isinstance(_node, (astroid.For, astroid.While)):
- if node not in _node.orelse:
- return
-
- if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)):
- break
- if (isinstance(_node, astroid.TryFinally)
- and node in _node.finalbody
- and isinstance(node, astroid.Continue)):
- self.add_message('continue-in-finally', node=node)
-
- _node = _node.parent
-
- self.add_message('not-in-loop', node=node, args=node_name)
-
- def _check_redefinition(self, redeftype, node):
- """check for redefinition of a function / method / class name"""
- defined_self = node.parent.frame()[node.name]
- if defined_self is not node and not astroid.are_exclusive(node, defined_self):
- self.add_message('function-redefined', node=node,
- args=(redeftype, defined_self.fromlineno))
-
-
-class BasicChecker(_BasicChecker):
- """checks for :
- * doc strings
- * number of arguments, local variables, branches, returns and statements in
-functions, methods
- * required module attributes
- * dangerous default values as arguments
- * redefinition of function / method / class
- * uses of the global statement
- """
-
- __implements__ = interfaces.IAstroidChecker
-
- name = 'basic'
- msgs = {
- 'W0101': ('Unreachable code',
- 'unreachable',
- 'Used when there is some code behind a "return" or "raise" '
- 'statement, which will never be accessed.'),
- 'W0102': ('Dangerous default value %s as argument',
- 'dangerous-default-value',
- 'Used when a mutable value as list or dictionary is detected in '
- 'a default value for an argument.'),
- 'W0104': ('Statement seems to have no effect',
- 'pointless-statement',
- 'Used when a statement doesn\'t have (or at least seems to) '
- 'any effect.'),
- 'W0105': ('String statement has no effect',
- 'pointless-string-statement',
- 'Used when a string is used as a statement (which of course '
- 'has no effect). This is a particular case of W0104 with its '
- 'own message so you can easily disable it if you\'re using '
- 'those strings as documentation, instead of comments.'),
- 'W0106': ('Expression "%s" is assigned to nothing',
- 'expression-not-assigned',
- 'Used when an expression that is not a function call is assigned '
- 'to nothing. Probably something else was intended.'),
- 'W0108': ('Lambda may not be necessary',
- 'unnecessary-lambda',
- 'Used when the body of a lambda expression is a function call '
- 'on the same argument list as the lambda itself; such lambda '
- 'expressions are in all but a few cases replaceable with the '
- 'function being called in the body of the lambda.'),
- 'W0109': ("Duplicate key %r in dictionary",
- 'duplicate-key',
- 'Used when a dictionary expression binds the same key multiple '
- 'times.'),
- 'W0122': ('Use of exec',
- 'exec-used',
- 'Used when you use the "exec" statement (function for Python '
- '3), to discourage its usage. That doesn\'t '
- 'mean you cannot use it !'),
- 'W0123': ('Use of eval',
- 'eval-used',
- 'Used when you use the "eval" function, to discourage its '
- 'usage. Consider using `ast.literal_eval` for safely evaluating '
- 'strings containing Python expressions '
- 'from untrusted sources. '),
- 'W0150': ("%s statement in finally block may swallow exception",
- 'lost-exception',
- 'Used when a break or a return statement is found inside the '
- 'finally clause of a try...finally block: the exceptions raised '
- 'in the try clause will be silently swallowed instead of being '
- 're-raised.'),
- 'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
- 'assert-on-tuple',
- 'A call of assert on a tuple will always evaluate to true if '
- 'the tuple is not empty, and will always evaluate to false if '
- 'it is.'),
- 'W0124': ('Following "as" with another context manager looks like a tuple.',
- 'confusing-with-statement',
- 'Emitted when a `with` statement component returns multiple values '
- 'and uses name binding with `as` only for a part of those values, '
- 'as in with ctx() as a, b. This can be misleading, since it\'s not '
- 'clear if the context manager returns a tuple or if the node without '
- 'a name binding is another context manager.'),
- 'W0125': ('Using a conditional statement with a constant value',
- 'using-constant-test',
- 'Emitted when a conditional statement (If or ternary if) '
- 'uses a constant value for its test. This might not be what '
- 'the user intended to do.'),
- 'E0111': ('The first reversed() argument is not a sequence',
- 'bad-reversed-sequence',
- 'Used when the first argument to reversed() builtin '
- 'isn\'t a sequence (does not implement __reversed__, '
- 'nor __getitem__ and __len__'),
-
- }
-
- reports = (('RP0101', 'Statistics by type', report_by_type_stats),)
-
- def __init__(self, linter):
- _BasicChecker.__init__(self, linter)
- self.stats = None
- self._tryfinallys = None
-
- def open(self):
- """initialize visit variables and statistics
- """
- self._tryfinallys = []
- self.stats = self.linter.add_stats(module=0, function=0,
- method=0, class_=0)
-
- @utils.check_messages('using-constant-test')
- def visit_if(self, node):
- self._check_using_constant_test(node, node.test)
-
- @utils.check_messages('using-constant-test')
- def visit_ifexp(self, node):
- self._check_using_constant_test(node, node.test)
-
- @utils.check_messages('using-constant-test')
- def visit_comprehension(self, node):
- if node.ifs:
- for if_test in node.ifs:
- self._check_using_constant_test(node, if_test)
-
- def _check_using_constant_test(self, node, test):
- const_nodes = (
- astroid.Module,
- astroid.scoped_nodes.GeneratorExp,
- astroid.Lambda, astroid.FunctionDef, astroid.ClassDef,
- astroid.bases.Generator, astroid.UnboundMethod,
- astroid.BoundMethod, astroid.Module)
- structs = (astroid.Dict, astroid.Tuple, astroid.Set)
-
- # These nodes are excepted, since they are not constant
- # values, requiring a computation to happen. The only type
- # of node in this list which doesn't have this property is
- # Getattr, which is excepted because the conditional statement
- # can be used to verify that the attribute was set inside a class,
- # which is definitely a valid use case.
- except_nodes = (astroid.Attribute, astroid.Call,
- astroid.BinOp, astroid.BoolOp, astroid.UnaryOp,
- astroid.Subscript)
- inferred = None
- emit = isinstance(test, (astroid.Const, ) + structs + const_nodes)
- if not isinstance(test, except_nodes):
- inferred = utils.safe_infer(test)
-
- if emit or isinstance(inferred, const_nodes):
- self.add_message('using-constant-test', node=node)
-
- def visit_module(self, _):
- """check module name, docstring and required arguments
- """
- self.stats['module'] += 1
-
- def visit_classdef(self, node): # pylint: disable=unused-argument
- """check module name, docstring and redefinition
- increment branch counter
- """
- self.stats['class'] += 1
-
- @utils.check_messages('pointless-statement', 'pointless-string-statement',
- 'expression-not-assigned')
- def visit_expr(self, node):
- """check for various kind of statements without effect"""
- expr = node.value
- if isinstance(expr, astroid.Const) and isinstance(expr.value,
- six.string_types):
- # treat string statement in a separated message
- # Handle PEP-257 attribute docstrings.
- # An attribute docstring is defined as being a string right after
- # an assignment at the module level, class level or __init__ level.
- scope = expr.scope()
- if isinstance(scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)):
- if isinstance(scope, astroid.FunctionDef) and scope.name != '__init__':
- pass
- else:
- sibling = expr.previous_sibling()
- if (sibling is not None and sibling.scope() is scope and
- isinstance(sibling, astroid.Assign)):
- return
- self.add_message('pointless-string-statement', node=node)
- return
- # ignore if this is :
- # * a direct function call
- # * the unique child of a try/except body
- # * a yield (which are wrapped by a discard node in _ast XXX)
- # warn W0106 if we have any underlying function call (we can't predict
- # side effects), else pointless-statement
- if (isinstance(expr, (astroid.Yield, astroid.Await, astroid.Call)) or
- (isinstance(node.parent, astroid.TryExcept) and
- node.parent.body == [node])):
- return
- if any(expr.nodes_of_class(astroid.Call)):
- self.add_message('expression-not-assigned', node=node,
- args=expr.as_string())
- else:
- self.add_message('pointless-statement', node=node)
-
- @staticmethod
- def _filter_vararg(node, call_args):
- # Return the arguments for the given call which are
- # not passed as vararg.
- for arg in call_args:
- if isinstance(arg, astroid.Starred):
- if (isinstance(arg.value, astroid.Name)
- and arg.value.name != node.args.vararg):
- yield arg
- else:
- yield arg
-
- @staticmethod
- def _has_variadic_argument(args, variadic_name):
- if not args:
- return True
- for arg in args:
- if isinstance(arg.value, astroid.Name):
- if arg.value.name != variadic_name:
- return True
- else:
- return True
- return False
-
- @utils.check_messages('unnecessary-lambda')
- def visit_lambda(self, node):
- """check whether or not the lambda is suspicious
- """
- # if the body of the lambda is a call expression with the same
- # argument list as the lambda itself, then the lambda is
- # possibly unnecessary and at least suspicious.
- if node.args.defaults:
- # If the arguments of the lambda include defaults, then a
- # judgment cannot be made because there is no way to check
- # that the defaults defined by the lambda are the same as
- # the defaults defined by the function called in the body
- # of the lambda.
- return
- call = node.body
- if not isinstance(call, astroid.Call):
- # The body of the lambda must be a function call expression
- # for the lambda to be unnecessary.
- return
- if (isinstance(node.body.func, astroid.Attribute) and
- isinstance(node.body.func.expr, astroid.Call)):
- # Chained call, the intermediate call might
- # return something else (but we don't check that, yet).
- return
-
- ordinary_args = list(node.args.args)
- new_call_args = list(self._filter_vararg(node, call.args))
- if node.args.kwarg:
- if self._has_variadic_argument(call.kwargs, node.args.kwarg):
- return
- elif call.kwargs or call.keywords:
- return
-
- if node.args.vararg:
- if self._has_variadic_argument(call.starargs, node.args.vararg):
- return
- elif call.starargs:
- return
-
- # The "ordinary" arguments must be in a correspondence such that:
- # ordinary_args[i].name == call.args[i].name.
- if len(ordinary_args) != len(new_call_args):
- return
- for arg, passed_arg in zip(ordinary_args, new_call_args):
- if not isinstance(passed_arg, astroid.Name):
- return
- if arg.name != passed_arg.name:
- return
-
- self.add_message('unnecessary-lambda', line=node.fromlineno,
- node=node)
-
- @utils.check_messages('dangerous-default-value')
- def visit_functiondef(self, node):
- """check function name, docstring, arguments, redefinition,
- variable names, max locals
- """
- self.stats[node.is_method() and 'method' or 'function'] += 1
- self._check_dangerous_default(node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_dangerous_default(self, node):
- # check for dangerous default values as arguments
- is_iterable = lambda n: isinstance(n, (astroid.List,
- astroid.Set,
- astroid.Dict))
- for default in node.args.defaults:
- try:
- value = next(default.infer())
- except astroid.InferenceError:
- continue
-
- if (isinstance(value, astroid.Instance) and
- value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
-
- if value is default:
- msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
- elif isinstance(value, astroid.Instance) or is_iterable(value):
- # We are here in the following situation(s):
- # * a dict/set/list/tuple call which wasn't inferred
- # to a syntax node ({}, () etc.). This can happen
- # when the arguments are invalid or unknown to
- # the inference.
- # * a variable from somewhere else, which turns out to be a list
- # or a dict.
- if is_iterable(default):
- msg = value.pytype()
- elif isinstance(default, astroid.Call):
- msg = '%s() (%s)' % (value.name, value.qname())
- else:
- msg = '%s (%s)' % (default.as_string(), value.qname())
- else:
- # this argument is a name
- msg = '%s (%s)' % (default.as_string(),
- DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
- self.add_message('dangerous-default-value',
- node=node,
- args=(msg, ))
-
- @utils.check_messages('unreachable', 'lost-exception')
- def visit_return(self, node):
- """1 - check is the node has a right sibling (if so, that's some
- unreachable code)
- 2 - check is the node is inside the finally clause of a try...finally
- block
- """
- self._check_unreachable(node)
- # Is it inside final body of a try...finally bloc ?
- self._check_not_in_finally(node, 'return', (astroid.FunctionDef,))
-
- @utils.check_messages('unreachable')
- def visit_continue(self, node):
- """check is the node has a right sibling (if so, that's some unreachable
- code)
- """
- self._check_unreachable(node)
-
- @utils.check_messages('unreachable', 'lost-exception')
- def visit_break(self, node):
- """1 - check is the node has a right sibling (if so, that's some
- unreachable code)
- 2 - check is the node is inside the finally clause of a try...finally
- block
- """
- # 1 - Is it right sibling ?
- self._check_unreachable(node)
- # 2 - Is it inside final body of a try...finally bloc ?
- self._check_not_in_finally(node, 'break', (astroid.For, astroid.While,))
-
- @utils.check_messages('unreachable')
- def visit_raise(self, node):
- """check if the node has a right sibling (if so, that's some unreachable
- code)
- """
- self._check_unreachable(node)
-
- @utils.check_messages('exec-used')
- def visit_exec(self, node):
- """just print a warning on exec statements"""
- self.add_message('exec-used', node=node)
-
- @utils.check_messages('eval-used', 'exec-used', 'bad-reversed-sequence')
- def visit_call(self, node):
- """visit a CallFunc node -> check if this is not a blacklisted builtin
- call and check for * or ** use
- """
- if isinstance(node.func, astroid.Name):
- name = node.func.name
- # ignore the name if it's not a builtin (i.e. not defined in the
- # locals nor globals scope)
- if not (name in node.frame() or
- name in node.root()):
- if name == 'exec':
- self.add_message('exec-used', node=node)
- elif name == 'reversed':
- self._check_reversed(node)
- elif name == 'eval':
- self.add_message('eval-used', node=node)
-
- @utils.check_messages('assert-on-tuple')
- def visit_assert(self, node):
- """check the use of an assert statement on a tuple."""
- if node.fail is None and isinstance(node.test, astroid.Tuple) and \
- len(node.test.elts) == 2:
- self.add_message('assert-on-tuple', node=node)
-
- @utils.check_messages('duplicate-key')
- def visit_dict(self, node):
- """check duplicate key in dictionary"""
- keys = set()
- for k, _ in node.items:
- if isinstance(k, astroid.Const):
- key = k.value
- if key in keys:
- self.add_message('duplicate-key', node=node, args=key)
- keys.add(key)
-
- def visit_tryfinally(self, node):
- """update try...finally flag"""
- self._tryfinallys.append(node)
-
- def leave_tryfinally(self, node): # pylint: disable=unused-argument
- """update try...finally flag"""
- self._tryfinallys.pop()
-
- def _check_unreachable(self, node):
- """check unreachable code"""
- unreach_stmt = node.next_sibling()
- if unreach_stmt is not None:
- self.add_message('unreachable', node=unreach_stmt)
-
- def _check_not_in_finally(self, node, node_name, breaker_classes=()):
- """check that a node is not inside a finally clause of a
- try...finally statement.
- If we found before a try...finally bloc a parent which its type is
- in breaker_classes, we skip the whole check."""
- # if self._tryfinallys is empty, we're not a in try...finally bloc
- if not self._tryfinallys:
- return
- # the node could be a grand-grand...-children of the try...finally
- _parent = node.parent
- _node = node
- while _parent and not isinstance(_parent, breaker_classes):
- if hasattr(_parent, 'finalbody') and _node in _parent.finalbody:
- self.add_message('lost-exception', node=node, args=node_name)
- return
- _node = _parent
- _parent = _node.parent
-
- def _check_reversed(self, node):
- """ check that the argument to `reversed` is a sequence """
- try:
- argument = utils.safe_infer(utils.get_argument_from_call(node, position=0))
- except utils.NoSuchArgumentError:
- pass
- else:
- if argument is astroid.YES:
- return
- if argument is None:
- # Nothing was infered.
- # Try to see if we have iter().
- if isinstance(node.args[0], astroid.Call):
- try:
- func = next(node.args[0].func.infer())
- except astroid.InferenceError:
- return
- if (getattr(func, 'name', None) == 'iter' and
- utils.is_builtin_object(func)):
- self.add_message('bad-reversed-sequence', node=node)
- return
-
- if isinstance(argument, astroid.Instance):
- if (argument._proxied.name == 'dict' and
- utils.is_builtin_object(argument._proxied)):
- self.add_message('bad-reversed-sequence', node=node)
- return
- elif any(ancestor.name == 'dict' and utils.is_builtin_object(ancestor)
- for ancestor in argument._proxied.ancestors()):
- # Mappings aren't accepted by reversed(), unless
- # they provide explicitly a __reversed__ method.
- try:
- argument.locals[REVERSED_PROTOCOL_METHOD]
- except KeyError:
- self.add_message('bad-reversed-sequence', node=node)
- return
-
- for methods in REVERSED_METHODS:
- for meth in methods:
- try:
- argument.getattr(meth)
- except astroid.NotFoundError:
- break
- else:
- break
- else:
- self.add_message('bad-reversed-sequence', node=node)
- elif not isinstance(argument, (astroid.List, astroid.Tuple)):
- # everything else is not a proper sequence for reversed()
- self.add_message('bad-reversed-sequence', node=node)
-
- @utils.check_messages('confusing-with-statement')
- def visit_with(self, node):
- if not PY3K:
- # in Python 2 a "with" statement with multiple managers coresponds
- # to multiple nested AST "With" nodes
- pairs = []
- parent_node = node.parent
- if isinstance(parent_node, astroid.With):
- # we only care about the direct parent, since this method
- # gets called for each with node anyway
- pairs.extend(parent_node.items)
- pairs.extend(node.items)
- else:
- # in PY3K a "with" statement with multiple managers coresponds
- # to one AST "With" node with multiple items
- pairs = node.items
- if pairs:
- for prev_pair, pair in zip(pairs, pairs[1:]):
- if (isinstance(prev_pair[1], astroid.AssignName) and
- (pair[1] is None and not isinstance(pair[0], astroid.Call))):
- # don't emit a message if the second is a function call
- # there's no way that can be mistaken for a name assignment
- if PY3K or node.lineno == node.parent.lineno:
- # if the line number doesn't match
- # we assume it's a nested "with"
- self.add_message('confusing-with-statement', node=node)
-
-
-_NAME_TYPES = {
- 'module': (MOD_NAME_RGX, 'module'),
- 'const': (CONST_NAME_RGX, 'constant'),
- 'class': (CLASS_NAME_RGX, 'class'),
- 'function': (DEFAULT_NAME_RGX, 'function'),
- 'method': (DEFAULT_NAME_RGX, 'method'),
- 'attr': (DEFAULT_NAME_RGX, 'attribute'),
- 'argument': (DEFAULT_NAME_RGX, 'argument'),
- 'variable': (DEFAULT_NAME_RGX, 'variable'),
- 'class_attribute': (CLASS_ATTRIBUTE_RGX, 'class attribute'),
- 'inlinevar': (COMP_VAR_RGX, 'inline iteration'),
-}
-
-
-def _create_naming_options():
- name_options = []
- for name_type, (rgx, human_readable_name) in six.iteritems(_NAME_TYPES):
- name_type = name_type.replace('_', '-')
- name_options.append((
- '%s-rgx' % (name_type,),
- {'default': rgx, 'type': 'regexp', 'metavar': '',
- 'help': 'Regular expression matching correct %s names' % (human_readable_name,)}))
- name_options.append((
- '%s-name-hint' % (name_type,),
- {'default': rgx.pattern, 'type': 'string', 'metavar': '',
- 'help': 'Naming hint for %s names' % (human_readable_name,)}))
- return tuple(name_options)
-
-
-class NameChecker(_BasicChecker):
-
- msgs = {
- 'C0102': ('Black listed name "%s"',
- 'blacklisted-name',
- 'Used when the name is listed in the black list (unauthorized '
- 'names).'),
- 'C0103': ('Invalid %s name "%s"%s',
- 'invalid-name',
- 'Used when the name doesn\'t match the regular expression '
- 'associated to its type (constant, variable, class...).'),
- 'W0111': ('Name %s will become a keyword in Python %s',
- 'assign-to-new-keyword',
- 'Used when assignment will become invalid in future '
- 'Python release due to introducing new keyword'),
- }
-
- options = (('good-names',
- {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
- 'type' :'csv', 'metavar' : '',
- 'help' : 'Good variable names which should always be accepted,'
- ' separated by a comma'}
- ),
- ('bad-names',
- {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
- 'type' :'csv', 'metavar' : '',
- 'help' : 'Bad variable names which should always be refused, '
- 'separated by a comma'}
- ),
- ('name-group',
- {'default' : (),
- 'type' :'csv', 'metavar' : '',
- 'help' : ('Colon-delimited sets of names that determine each'
- ' other\'s naming style when the name regexes'
- ' allow several styles.')}
- ),
- ('include-naming-hint',
- {'default': False, 'type' : 'yn', 'metavar' : '',
- 'help': 'Include a hint for the correct naming format with invalid-name'}
- ),
- ('property-classes',
- {'default': ('abc.abstractproperty',),
- 'type': 'csv',
- 'metavar': '',
- 'help': 'List of decorators that produce properties, such as '
- 'abc.abstractproperty. Add to this list to register '
- 'other decorators that produce valid properties.'}
- ),
- ) + _create_naming_options()
-
- KEYWORD_ONSET = {
- (3, 0): {'True', 'False'},
- (3, 7): {'async', 'await'}
- }
-
- def __init__(self, linter):
- _BasicChecker.__init__(self, linter)
- self._name_category = {}
- self._name_group = {}
- self._bad_names = {}
-
- def open(self):
- self.stats = self.linter.add_stats(badname_module=0,
- badname_class=0, badname_function=0,
- badname_method=0, badname_attr=0,
- badname_const=0,
- badname_variable=0,
- badname_inlinevar=0,
- badname_argument=0,
- badname_class_attribute=0)
- for group in self.config.name_group:
- for name_type in group.split(':'):
- self._name_group[name_type] = 'group_%s' % (group,)
-
- @utils.check_messages('blacklisted-name', 'invalid-name')
- def visit_module(self, node):
- self._check_name('module', node.name.split('.')[-1], node)
- self._bad_names = {}
-
- def leave_module(self, node): # pylint: disable=unused-argument
- for all_groups in six.itervalues(self._bad_names):
- if len(all_groups) < 2:
- continue
- groups = collections.defaultdict(list)
- min_warnings = sys.maxsize
- for group in six.itervalues(all_groups):
- groups[len(group)].append(group)
- min_warnings = min(len(group), min_warnings)
- if len(groups[min_warnings]) > 1:
- by_line = sorted(groups[min_warnings],
- key=lambda group: min(warning[0].lineno for warning in group))
- warnings = itertools.chain(*by_line[1:])
- else:
- warnings = groups[min_warnings][0]
- for args in warnings:
- self._raise_name_warning(*args)
-
- @utils.check_messages('blacklisted-name', 'invalid-name')
- def visit_classdef(self, node):
- self._check_name('class', node.name, node)
- for attr, anodes in six.iteritems(node.instance_attrs):
- if not any(node.instance_attr_ancestors(attr)):
- self._check_name('attr', attr, anodes[0])
-
- @utils.check_messages('blacklisted-name', 'invalid-name')
- def visit_functiondef(self, node):
- # Do not emit any warnings if the method is just an implementation
- # of a base class method.
- confidence = interfaces.HIGH
- if node.is_method():
- if utils.overrides_a_method(node.parent.frame(), node.name):
- return
- confidence = (interfaces.INFERENCE if utils.has_known_bases(node.parent.frame())
- else interfaces.INFERENCE_FAILURE)
-
- self._check_name(_determine_function_name_type(node,
- config=self.config),
- node.name, node, confidence)
- # Check argument names
- args = node.args.args
- if args is not None:
- self._recursive_check_names(args, node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- @utils.check_messages('blacklisted-name', 'invalid-name')
- def visit_global(self, node):
- for name in node.names:
- self._check_name('const', name, node)
-
- @utils.check_messages('blacklisted-name', 'invalid-name')
- def visit_assignname(self, node):
- """check module level assigned names"""
- keyword_first_version = self._name_became_keyword_in_version(
- node.name, self.KEYWORD_ONSET
- )
- if keyword_first_version is not None:
- self.add_message('assign-to-new-keyword',
- node=node, args=(node.name, keyword_first_version),
- confidence=interfaces.HIGH)
-
- frame = node.frame()
- ass_type = node.assign_type()
- if isinstance(ass_type, astroid.Comprehension):
- self._check_name('inlinevar', node.name, node)
- elif isinstance(frame, astroid.Module):
- if isinstance(ass_type, astroid.Assign) and not in_loop(ass_type):
- if isinstance(utils.safe_infer(ass_type.value), astroid.ClassDef):
- self._check_name('class', node.name, node)
- else:
- if not _redefines_import(node):
- # Don't emit if the name redefines an import
- # in an ImportError except handler.
- self._check_name('const', node.name, node)
- elif isinstance(ass_type, astroid.ExceptHandler):
- self._check_name('variable', node.name, node)
- elif isinstance(frame, astroid.FunctionDef):
- # global introduced variable aren't in the function locals
- if node.name in frame and node.name not in frame.argnames():
- if not _redefines_import(node):
- self._check_name('variable', node.name, node)
- elif isinstance(frame, astroid.ClassDef):
- if not list(frame.local_attr_ancestors(node.name)):
- self._check_name('class_attribute', node.name, node)
-
- def _recursive_check_names(self, args, node):
- """check names in a possibly recursive list """
- for arg in args:
- if isinstance(arg, astroid.AssignName):
- self._check_name('argument', arg.name, node)
- else:
- self._recursive_check_names(arg.elts, node)
-
- def _find_name_group(self, node_type):
- return self._name_group.get(node_type, node_type)
-
- def _raise_name_warning(self, node, node_type, name, confidence):
- type_label = _NAME_TYPES[node_type][1]
- hint = ''
- if self.config.include_naming_hint:
- hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
- self.add_message('invalid-name', node=node, args=(type_label, name, hint),
- confidence=confidence)
- self.stats['badname_' + node_type] += 1
-
- def _check_name(self, node_type, name, node, confidence=interfaces.HIGH):
- """check for a name using the type's regexp"""
- if utils.is_inside_except(node):
- clobbering, _ = utils.clobber_in_except(node)
- if clobbering:
- return
- if name in self.config.good_names:
- return
- if name in self.config.bad_names:
- self.stats['badname_' + node_type] += 1
- self.add_message('blacklisted-name', node=node, args=name)
- return
- regexp = getattr(self.config, node_type + '_rgx')
- match = regexp.match(name)
-
- if _is_multi_naming_match(match, node_type, confidence):
- name_group = self._find_name_group(node_type)
- bad_name_group = self._bad_names.setdefault(name_group, {})
- warnings = bad_name_group.setdefault(match.lastgroup, [])
- warnings.append((node, node_type, name, confidence))
-
- if match is None:
- self._raise_name_warning(node, node_type, name, confidence)
-
- @staticmethod
- def _name_became_keyword_in_version(name, rules):
- for version, keywords in rules.items():
- if name in keywords and sys.version_info < version:
- return '.'.join(map(str, version))
- return None
-
-
-class DocStringChecker(_BasicChecker):
- msgs = {
- 'C0111': ('Missing %s docstring', # W0131
- 'missing-docstring',
- 'Used when a module, function, class or method has no docstring.'
- 'Some special methods like __init__ doesn\'t necessary require a '
- 'docstring.'),
- 'C0112': ('Empty %s docstring', # W0132
- 'empty-docstring',
- 'Used when a module, function, class or method has an empty '
- 'docstring (it would be too easy ;).'),
- }
- options = (('no-docstring-rgx',
- {'default' : NO_REQUIRED_DOC_RGX,
- 'type' : 'regexp', 'metavar' : '',
- 'help' : 'Regular expression which should only match '
- 'function or class names that do not require a '
- 'docstring.'}
- ),
- ('docstring-min-length',
- {'default' : -1,
- 'type' : 'int', 'metavar' : '',
- 'help': ('Minimum line length for functions/classes that'
- ' require docstrings, shorter ones are exempt.')}
- ),
- )
-
- def open(self):
- self.stats = self.linter.add_stats(undocumented_module=0,
- undocumented_function=0,
- undocumented_method=0,
- undocumented_class=0)
-
- @utils.check_messages('missing-docstring', 'empty-docstring')
- def visit_module(self, node):
- self._check_docstring('module', node)
-
- @utils.check_messages('missing-docstring', 'empty-docstring')
- def visit_classdef(self, node):
- if self.config.no_docstring_rgx.match(node.name) is None:
- self._check_docstring('class', node)
-
- @staticmethod
- def _is_setter_or_deleter(node):
- names = {'setter', 'deleter'}
- for decorator in node.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute)
- and decorator.attrname in names):
- return True
- return False
-
- @utils.check_messages('missing-docstring', 'empty-docstring')
- def visit_functiondef(self, node):
- if self.config.no_docstring_rgx.match(node.name) is None:
- ftype = 'method' if node.is_method() else 'function'
- if node.decorators and self._is_setter_or_deleter(node):
- return
-
- if isinstance(node.parent.frame(), astroid.ClassDef):
- overridden = False
- confidence = (interfaces.INFERENCE if utils.has_known_bases(node.parent.frame())
- else interfaces.INFERENCE_FAILURE)
- # check if node is from a method overridden by its ancestor
- for ancestor in node.parent.frame().ancestors():
- if node.name in ancestor and \
- isinstance(ancestor[node.name], astroid.FunctionDef):
- overridden = True
- break
- self._check_docstring(ftype, node,
- report_missing=not overridden,
- confidence=confidence)
- else:
- self._check_docstring(ftype, node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_docstring(self, node_type, node, report_missing=True,
- confidence=interfaces.HIGH):
- """check the node has a non empty docstring"""
- docstring = node.doc
- if docstring is None:
- if not report_missing:
- return
- if node.body:
- lines = node.body[-1].lineno - node.body[0].lineno + 1
- else:
- lines = 0
-
- if node_type == 'module' and not lines:
- # If the module has no body, there's no reason
- # to require a docstring.
- return
- max_lines = self.config.docstring_min_length
-
- if node_type != 'module' and max_lines > -1 and lines < max_lines:
- return
- self.stats['undocumented_'+node_type] += 1
- if (node.body and isinstance(node.body[0], astroid.Expr) and
- isinstance(node.body[0].value, astroid.Call)):
- # Most likely a string with a format call. Let's see.
- func = utils.safe_infer(node.body[0].value.func)
- if (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)):
- # Strings in Python 3, others in Python 2.
- if PY3K and func.bound.name == 'str':
- return
- elif func.bound.name in ('str', 'unicode', 'bytes'):
- return
- self.add_message('missing-docstring', node=node, args=(node_type,),
- confidence=confidence)
- elif not docstring.strip():
- self.stats['undocumented_'+node_type] += 1
- self.add_message('empty-docstring', node=node, args=(node_type,),
- confidence=confidence)
-
-
-class PassChecker(_BasicChecker):
- """check if the pass statement is really necessary"""
- msgs = {'W0107': ('Unnecessary pass statement',
- 'unnecessary-pass',
- 'Used when a "pass" statement that can be avoided is '
- 'encountered.'),
- }
-
- @utils.check_messages('unnecessary-pass')
- def visit_pass(self, node):
- if len(node.parent.child_sequence(node)) > 1:
- self.add_message('unnecessary-pass', node=node)
-
-
-class LambdaForComprehensionChecker(_BasicChecker):
- """check for using a lambda where a comprehension would do.
-
- See
- where GvR says comprehensions would be clearer.
- """
-
- msgs = {'W0110': ('map/filter on lambda could be replaced by comprehension',
- 'deprecated-lambda',
- 'Used when a lambda is the first argument to "map" or '
- '"filter". It could be clearer as a list '
- 'comprehension or generator expression.',
- {'maxversion': (3, 0)}),
- }
-
- @utils.check_messages('deprecated-lambda')
- def visit_call(self, node):
- """visit a CallFunc node, check if map or filter are called with a
- lambda
- """
- if not node.args:
- return
- if not isinstance(node.args[0], astroid.Lambda):
- return
- infered = utils.safe_infer(node.func)
- if (utils.is_builtin_object(infered)
- and infered.name in ['map', 'filter']):
- self.add_message('deprecated-lambda', node=node)
-
-
-def _is_one_arg_pos_call(call):
- """Is this a call with exactly 1 argument,
- where that argument is positional?
- """
- return (isinstance(call, astroid.Call)
- and len(call.args) == 1 and not call.keywords)
-
-
-class ComparisonChecker(_BasicChecker):
- """Checks for comparisons
-
- - singleton comparison: 'expr == True', 'expr == False' and 'expr == None'
- - yoda condition: 'const "comp" right' where comp can be '==', '!=', '<',
- '<=', '>' or '>=', and right can be a variable, an attribute, a method or
- a function
- """
- msgs = {'C0121': ('Comparison to %s should be %s',
- 'singleton-comparison',
- 'Used when an expression is compared to singleton '
- 'values like True, False or None.'),
- 'C0122': ('Comparison should be %s',
- 'misplaced-comparison-constant',
- 'Used when the constant is placed on the left side '
- 'of a comparison. It is usually clearer in intent to '
- 'place it in the right hand side of the comparison.'),
- 'C0123': ('Using type() instead of isinstance() for a typecheck.',
- 'unidiomatic-typecheck',
- 'The idiomatic way to perform an explicit typecheck in '
- 'Python is to use isinstance(x, Y) rather than '
- 'type(x) == Y, type(x) is Y. Though there are unusual '
- 'situations where these give different results.',
- {'old_names': [('W0154', 'unidiomatic-typecheck')]}),
- 'R0123': ('Comparison to literal',
- 'literal-comparison',
- 'Used when comparing an object to a literal, which is usually '
- 'what you do not want to do, since you can compare to a different '
- 'literal than what was expected altogether.'),
- }
-
- def _check_singleton_comparison(self, singleton, root_node):
- if singleton.value is True:
- suggestion = "just 'expr' or 'expr is True'"
- self.add_message('singleton-comparison',
- node=root_node,
- args=(True, suggestion))
- elif singleton.value is False:
- suggestion = "'not expr' or 'expr is False'"
- self.add_message('singleton-comparison',
- node=root_node,
- args=(False, suggestion))
- elif singleton.value is None:
- self.add_message('singleton-comparison',
- node=root_node,
- args=(None, "'expr is None'"))
-
- def _check_literal_comparison(self, literal, node):
- """Check if we compare to a literal, which is usually what we do not want to do."""
- nodes = (astroid.List,
- astroid.Tuple,
- astroid.Dict,
- astroid.Set)
- is_other_literal = isinstance(literal, nodes)
- is_const = False
- if isinstance(literal, astroid.Const):
- if literal.value in (True, False, None):
- # Not interested in this values.
- return
- is_const = isinstance(literal.value, (bytes, str, int, float))
-
- if is_const or is_other_literal:
- self.add_message('literal-comparison', node=node)
-
- def _check_misplaced_constant(self, node, left, right, operator):
- if isinstance(right, astroid.Const):
- return
- operator = REVERSED_COMPS.get(operator, operator)
- suggestion = '%s %s %r' % (right.as_string(), operator, left.value)
- self.add_message('misplaced-comparison-constant', node=node,
- args=(suggestion,))
-
- @utils.check_messages('singleton-comparison', 'misplaced-comparison-constant',
- 'unidiomatic-typecheck', 'literal-comparison')
- def visit_compare(self, node):
- self._check_unidiomatic_typecheck(node)
- # NOTE: this checker only works with binary comparisons like 'x == 42'
- # but not 'x == y == 42'
- if len(node.ops) != 1:
- return
-
- left = node.left
- operator, right = node.ops[0]
- if (operator in ('<', '<=', '>', '>=', '!=', '==')
- and isinstance(left, astroid.Const)):
- self._check_misplaced_constant(node, left, right, operator)
-
- if operator == '==':
- if isinstance(left, astroid.Const):
- self._check_singleton_comparison(left, node)
- elif isinstance(right, astroid.Const):
- self._check_singleton_comparison(right, node)
- if operator in ('is', 'is not'):
- self._check_literal_comparison(right, node)
-
- def _check_unidiomatic_typecheck(self, node):
- operator, right = node.ops[0]
- if operator in TYPECHECK_COMPARISON_OPERATORS:
- left = node.left
- if _is_one_arg_pos_call(left):
- self._check_type_x_is_y(node, left, operator, right)
-
- def _check_type_x_is_y(self, node, left, operator, right):
- """Check for expressions like type(x) == Y."""
- left_func = utils.safe_infer(left.func)
- if not (isinstance(left_func, astroid.ClassDef)
- and left_func.qname() == TYPE_QNAME):
- return
-
- if operator in ('is', 'is not') and _is_one_arg_pos_call(right):
- right_func = utils.safe_infer(right.func)
- if (isinstance(right_func, astroid.ClassDef)
- and right_func.qname() == TYPE_QNAME):
- # type(x) == type(a)
- right_arg = utils.safe_infer(right.args[0])
- if not isinstance(right_arg, LITERAL_NODE_TYPES):
- # not e.g. type(x) == type([])
- return
- self.add_message('unidiomatic-typecheck', node=node)
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(BasicErrorChecker(linter))
- linter.register_checker(BasicChecker(linter))
- linter.register_checker(NameChecker(linter))
- linter.register_checker(DocStringChecker(linter))
- linter.register_checker(PassChecker(linter))
- linter.register_checker(LambdaForComprehensionChecker(linter))
- linter.register_checker(ComparisonChecker(linter))
diff --git a/pymode/libs/pylint/checkers/classes.py b/pymode/libs/pylint/checkers/classes.py
deleted file mode 100644
index 9975220b..00000000
--- a/pymode/libs/pylint/checkers/classes.py
+++ /dev/null
@@ -1,1402 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2016 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2012, 2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2016 Moises Lopez - https://www.vauxoo.com/
-# Copyright (c) 2016 Łukasz Rogalski
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""classes checker for Python code
-"""
-from __future__ import generators
-
-import collections
-import sys
-
-import six
-
-import astroid
-from astroid.bases import Generator, BUILTINS
-from astroid.exceptions import InconsistentMroError, DuplicateBasesError
-from astroid import decorators
-from astroid import objects
-from astroid.scoped_nodes import function_to_method
-from pylint.interfaces import IAstroidChecker
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (
- PYMETHODS, SPECIAL_METHODS_PARAMS,
- overrides_a_method, check_messages, is_attr_private,
- is_attr_protected, node_frame_class, is_builtin_object,
- decorated_with_property, unimplemented_abstract_methods,
- decorated_with, class_is_abstract,
- safe_infer, has_known_bases, is_iterable, is_comprehension)
-from pylint.utils import get_global_option
-
-
-if sys.version_info >= (3, 0):
- NEXT_METHOD = '__next__'
-else:
- NEXT_METHOD = 'next'
-INVALID_BASE_CLASSES = {'bool', 'range', 'slice', 'memoryview'}
-
-
-# Dealing with useless override detection, with regard
-# to parameters vs arguments
-
-_CallSignature = collections.namedtuple(
- '_CallSignature', 'args kws starred_args starred_kws')
-_ParameterSignature = collections.namedtuple(
- '_ParameterSignature',
- 'args kwonlyargs varargs kwargs',
-)
-
-
-def _signature_from_call(call):
- kws = {}
- args = []
- starred_kws = []
- starred_args = []
- for keyword in call.keywords or []:
- arg, value = keyword.arg, keyword.value
- if arg is None and isinstance(value, astroid.Name):
- # Starred node and we are interested only in names,
- # otherwise some transformation might occur for the parameter.
- starred_kws.append(value.name)
- elif isinstance(value, astroid.Name):
- kws[arg] = value.name
- else:
- kws[arg] = None
-
- for arg in call.args:
- if isinstance(arg, astroid.Starred) and isinstance(arg.value, astroid.Name):
- # Positional variadic and a name, otherwise some transformation
- # might have occurred.
- starred_args.append(arg.value.name)
- elif isinstance(arg, astroid.Name):
- args.append(arg.name)
- else:
- args.append(None)
-
- return _CallSignature(args, kws, starred_args, starred_kws)
-
-
-def _signature_from_arguments(arguments):
- kwarg = arguments.kwarg
- vararg = arguments.vararg
- args = [arg.name for arg in arguments.args if arg.name != 'self']
- kwonlyargs = [arg.name for arg in arguments.kwonlyargs]
- return _ParameterSignature(args, kwonlyargs, vararg, kwarg)
-
-
-def _definition_equivalent_to_call(definition, call):
- '''Check if a definition signature is equivalent to a call.'''
- if definition.kwargs:
- same_kw_variadics = definition.kwargs in call.starred_kws
- else:
- same_kw_variadics = not call.starred_kws
- if definition.varargs:
- same_args_variadics = definition.varargs in call.starred_args
- else:
- same_args_variadics = not call.starred_args
- same_kwonlyargs = all(kw in call.kws for kw in definition.kwonlyargs)
- same_args = definition.args == call.args
-
- no_additional_kwarg_arguments = True
- if call.kws:
- for keyword in call.kws:
- is_arg = keyword in call.args
- is_kwonly = keyword in definition.kwonlyargs
- if not is_arg and not is_kwonly:
- # Maybe this argument goes into **kwargs,
- # or it is an extraneous argument.
- # In any case, the signature is different than
- # the call site, which stops our search.
- no_additional_kwarg_arguments = False
- break
-
- return all((
- same_args,
- same_kwonlyargs,
- same_args_variadics,
- same_kw_variadics,
- no_additional_kwarg_arguments,
- ))
-
-# Deal with parameters overridding in two methods.
-
-def _positional_parameters(method):
- positional = method.args.args
- if method.type in ('classmethod', 'method'):
- positional = positional[1:]
- return positional
-
-
-def _has_different_parameters(original, overridden, dummy_parameter_regex):
- zipped = six.moves.zip_longest(original, overridden)
- for original_param, overridden_param in zipped:
- params = (original_param, overridden_param)
- if not all(params):
- return True
-
- names = [param.name for param in params]
- if any(map(dummy_parameter_regex.match, names)):
- continue
- if original_param.name != overridden_param.name:
- return True
- return False
-
-
-def _different_parameters(original, overridden, dummy_parameter_regex):
- """Determine if the two methods have different parameters
-
- They are considered to have different parameters if:
-
- * they have different positional parameters, including different names
-
- * one of the methods is having variadics, while the other is not
-
- * they have different keyword only parameters.
-
- """
- original_parameters = _positional_parameters(original)
- overridden_parameters = _positional_parameters(overridden)
-
- different_positional = _has_different_parameters(
- original_parameters,
- overridden_parameters,
- dummy_parameter_regex)
- different_kwonly = _has_different_parameters(
- original.args.kwonlyargs,
- overridden.args.kwonlyargs,
- dummy_parameter_regex)
- if original.name in PYMETHODS:
- # Ignore the difference for special methods. If the parameter
- # numbers are different, then that is going to be caught by
- # unexpected-special-method-signature.
- # If the names are different, it doesn't matter, since they can't
- # be used as keyword arguments anyway.
- different_positional = different_kwonly = False
-
- # Both or none should have extra variadics, otherwise the method
- # loses or gains capabilities that are not reflected into the parent method,
- # leading to potential inconsistencies in the code.
- different_kwarg = sum(
- 1 for param in (original.args.kwarg, overridden.args.kwarg)
- if not param) == 1
- different_vararg = sum(
- 1 for param in (original.args.vararg, overridden.args.vararg)
- if not param) == 1
-
- return any((
- different_positional,
- different_kwarg,
- different_vararg,
- different_kwonly
- ))
-
-
-def _is_invalid_base_class(cls):
- return cls.name in INVALID_BASE_CLASSES and is_builtin_object(cls)
-
-
-def _has_data_descriptor(cls, attr):
- attributes = cls.getattr(attr)
- for attribute in attributes:
- try:
- for inferred in attribute.infer():
- if isinstance(inferred, astroid.Instance):
- try:
- inferred.getattr('__get__')
- inferred.getattr('__set__')
- except astroid.NotFoundError:
- continue
- else:
- return True
- except astroid.InferenceError:
- # Can't infer, avoid emitting a false positive in this case.
- return True
- return False
-
-
-def _called_in_methods(func, klass, methods):
- """ Check if the func was called in any of the given methods,
- belonging to the *klass*. Returns True if so, False otherwise.
- """
- if not isinstance(func, astroid.FunctionDef):
- return False
- for method in methods:
- try:
- infered = klass.getattr(method)
- except astroid.NotFoundError:
- continue
- for infer_method in infered:
- for callfunc in infer_method.nodes_of_class(astroid.Call):
- try:
- bound = next(callfunc.func.infer())
- except (astroid.InferenceError, StopIteration):
- continue
- if not isinstance(bound, astroid.BoundMethod):
- continue
- func_obj = bound._proxied
- if isinstance(func_obj, astroid.UnboundMethod):
- func_obj = func_obj._proxied
- if func_obj.name == func.name:
- return True
- return False
-
-
-def _is_attribute_property(name, klass):
- """ Check if the given attribute *name* is a property
- in the given *klass*.
-
- It will look for `property` calls or for functions
- with the given name, decorated by `property` or `property`
- subclasses.
- Returns ``True`` if the name is a property in the given klass,
- ``False`` otherwise.
- """
-
- try:
- attributes = klass.getattr(name)
- except astroid.NotFoundError:
- return False
- property_name = "{0}.property".format(BUILTINS)
- for attr in attributes:
- try:
- infered = next(attr.infer())
- except astroid.InferenceError:
- continue
- if (isinstance(infered, astroid.FunctionDef) and
- decorated_with_property(infered)):
- return True
- if infered.pytype() == property_name:
- return True
- return False
-
-
-def _has_bare_super_call(fundef_node):
- for call in fundef_node.nodes_of_class(astroid.Call):
- func = call.func
- if (isinstance(func, astroid.Name) and
- func.name == 'super' and
- not call.args):
- return True
- return False
-
-
-def _safe_infer_call_result(node, caller, context=None):
- """
- Safely infer the return value of a function.
-
- Returns None if inference failed or if there is some ambiguity (more than
- one node has been inferred). Otherwise returns infered value.
- """
- try:
- inferit = node.infer_call_result(caller, context=context)
- value = next(inferit)
- except astroid.InferenceError:
- return # inference failed
- except StopIteration:
- return # no values infered
- try:
- next(inferit)
- return # there is ambiguity on the inferred node
- except astroid.InferenceError:
- return # there is some kind of ambiguity
- except StopIteration:
- return value
-
-
-MSGS = {
- 'F0202': ('Unable to check methods signature (%s / %s)',
- 'method-check-failed',
- 'Used when Pylint has been unable to check methods signature '
- 'compatibility for an unexpected reason. Please report this kind '
- 'if you don\'t make sense of it.'),
-
- 'E0202': ('An attribute defined in %s line %s hides this method',
- 'method-hidden',
- 'Used when a class defines a method which is hidden by an '
- 'instance attribute from an ancestor class or set by some '
- 'client code.'),
- 'E0203': ('Access to member %r before its definition line %s',
- 'access-member-before-definition',
- 'Used when an instance member is accessed before it\'s actually '
- 'assigned.'),
- 'W0201': ('Attribute %r defined outside __init__',
- 'attribute-defined-outside-init',
- 'Used when an instance attribute is defined outside the __init__ '
- 'method.'),
-
- 'W0212': ('Access to a protected member %s of a client class', # E0214
- 'protected-access',
- 'Used when a protected member (i.e. class member with a name '
- 'beginning with an underscore) is access outside the class or a '
- 'descendant of the class where it\'s defined.'),
-
- 'E0211': ('Method has no argument',
- 'no-method-argument',
- 'Used when a method which should have the bound instance as '
- 'first argument has no argument defined.'),
- 'E0213': ('Method should have "self" as first argument',
- 'no-self-argument',
- 'Used when a method has an attribute different the "self" as '
- 'first argument. This is considered as an error since this is '
- 'a so common convention that you shouldn\'t break it!'),
- 'C0202': ('Class method %s should have %s as first argument',
- 'bad-classmethod-argument',
- 'Used when a class method has a first argument named differently '
- 'than the value specified in valid-classmethod-first-arg option '
- '(default to "cls"), recommended to easily differentiate them '
- 'from regular instance methods.'),
- 'C0203': ('Metaclass method %s should have %s as first argument',
- 'bad-mcs-method-argument',
- 'Used when a metaclass method has a first argument named '
- 'differently than the value specified in valid-classmethod-first'
- '-arg option (default to "cls"), recommended to easily '
- 'differentiate them from regular instance methods.'),
- 'C0204': ('Metaclass class method %s should have %s as first argument',
- 'bad-mcs-classmethod-argument',
- 'Used when a metaclass class method has a first argument named '
- 'differently than the value specified in valid-metaclass-'
- 'classmethod-first-arg option (default to "mcs"), recommended to '
- 'easily differentiate them from regular instance methods.'),
-
- 'W0211': ('Static method with %r as first argument',
- 'bad-staticmethod-argument',
- 'Used when a static method has "self" or a value specified in '
- 'valid-classmethod-first-arg option or '
- 'valid-metaclass-classmethod-first-arg option as first argument.'
- ),
- 'R0201': ('Method could be a function',
- 'no-self-use',
- 'Used when a method doesn\'t use its bound instance, and so could '
- 'be written as a function.'
- ),
- 'W0221': ('Parameters differ from %s %r method',
- 'arguments-differ',
- 'Used when a method has a different number of arguments than in '
- 'the implemented interface or in an overridden method.'),
- 'W0222': ('Signature differs from %s %r method',
- 'signature-differs',
- 'Used when a method signature is different than in the '
- 'implemented interface or in an overridden method.'),
- 'W0223': ('Method %r is abstract in class %r but is not overridden',
- 'abstract-method',
- 'Used when an abstract method (i.e. raise NotImplementedError) is '
- 'not overridden in concrete class.'
- ),
- 'W0231': ('__init__ method from base class %r is not called',
- 'super-init-not-called',
- 'Used when an ancestor class method has an __init__ method '
- 'which is not called by a derived class.'),
- 'W0232': ('Class has no __init__ method',
- 'no-init',
- 'Used when a class has no __init__ method, neither its parent '
- 'classes.'),
- 'W0233': ('__init__ method from a non direct base class %r is called',
- 'non-parent-init-called',
- 'Used when an __init__ method is called on a class which is not '
- 'in the direct ancestors for the analysed class.'),
- 'W0235': ('Useless super delegation in method %r',
- 'useless-super-delegation',
- 'Used whenever we can detect that an overridden method is useless, '
- 'relying on super() delegation to do the same thing as another method '
- 'from the MRO.'),
- 'E0236': ('Invalid object %r in __slots__, must contain '
- 'only non empty strings',
- 'invalid-slots-object',
- 'Used when an invalid (non-string) object occurs in __slots__.'),
- 'E0237': ('Assigning to attribute %r not defined in class slots',
- 'assigning-non-slot',
- 'Used when assigning to an attribute not defined '
- 'in the class slots.'),
- 'E0238': ('Invalid __slots__ object',
- 'invalid-slots',
- 'Used when an invalid __slots__ is found in class. '
- 'Only a string, an iterable or a sequence is permitted.'),
- 'E0239': ('Inheriting %r, which is not a class.',
- 'inherit-non-class',
- 'Used when a class inherits from something which is not a '
- 'class.'),
- 'E0240': ('Inconsistent method resolution order for class %r',
- 'inconsistent-mro',
- 'Used when a class has an inconsistent method resolution order.'),
- 'E0241': ('Duplicate bases for class %r',
- 'duplicate-bases',
- 'Used when a class has duplicate bases.'),
- 'R0202': ('Consider using a decorator instead of calling classmethod',
- 'no-classmethod-decorator',
- 'Used when a class method is defined without using the decorator '
- 'syntax.'),
- 'R0203': ('Consider using a decorator instead of calling staticmethod',
- 'no-staticmethod-decorator',
- 'Used when a static method is defined without using the decorator '
- 'syntax.'),
- 'C0205': ('Class __slots__ should be a non-string iterable',
- 'single-string-used-for-slots',
- 'Used when a class __slots__ is a simple string, rather '
- 'than an iterable.'),
- }
-
-
-class ScopeAccessMap(object):
- """Store the accessed variables per scope."""
-
- def __init__(self):
- self._scopes = collections.defaultdict(
- lambda: collections.defaultdict(list)
- )
-
- def set_accessed(self, node):
- """Set the given node as accessed."""
-
- frame = node_frame_class(node)
- if frame is None:
- # The node does not live in a class.
- return
- self._scopes[frame][node.attrname].append(node)
-
- def accessed(self, scope):
- """Get the accessed variables for the given scope."""
- return self._scopes.get(scope, {})
-
-
-class ClassChecker(BaseChecker):
- """checks for :
- * methods without self as first argument
- * overridden methods signature
- * access only to existent members via self
- * attributes not defined in the __init__ method
- * unreachable code
- """
-
- __implements__ = (IAstroidChecker,)
-
- # configuration section name
- name = 'classes'
- # messages
- msgs = MSGS
- priority = -2
- # configuration options
- options = (('defining-attr-methods',
- {'default' : ('__init__', '__new__', 'setUp'),
- 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of method names used to declare (i.e. assign) \
-instance attributes.'}
- ),
- ('valid-classmethod-first-arg',
- {'default' : ('cls',),
- 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of valid names for the first argument in \
-a class method.'}
- ),
- ('valid-metaclass-classmethod-first-arg',
- {'default' : ('mcs',),
- 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of valid names for the first argument in \
-a metaclass class method.'}
- ),
- ('exclude-protected',
- {
- 'default': (
- # namedtuple public API.
- '_asdict', '_fields', '_replace', '_source', '_make'),
- 'type': 'csv',
- 'metavar': '',
- 'help': ('List of member names, which should be excluded '
- 'from the protected access warning.')}
- ))
-
- def __init__(self, linter=None):
- BaseChecker.__init__(self, linter)
- self._accessed = ScopeAccessMap()
- self._first_attrs = []
- self._meth_could_be_func = None
-
- @decorators.cachedproperty
- def _dummy_rgx(self):
- return get_global_option(
- self, 'dummy-variables-rgx', default=None)
-
- @decorators.cachedproperty
- def _ignore_mixin(self):
- return get_global_option(
- self, 'ignore-mixin-members', default=True)
-
- def visit_classdef(self, node):
- """init visit variable _accessed
- """
- self._check_bases_classes(node)
- # if not an exception or a metaclass
- if node.type == 'class' and has_known_bases(node):
- try:
- node.local_attr('__init__')
- except astroid.NotFoundError:
- self.add_message('no-init', args=node, node=node)
- self._check_slots(node)
- self._check_proper_bases(node)
- self._check_consistent_mro(node)
-
- def _check_consistent_mro(self, node):
- """Detect that a class has a consistent mro or duplicate bases."""
- try:
- node.mro()
- except InconsistentMroError:
- self.add_message('inconsistent-mro', args=node.name, node=node)
- except DuplicateBasesError:
- self.add_message('duplicate-bases', args=node.name, node=node)
- except NotImplementedError:
- # Old style class, there's no mro so don't do anything.
- pass
-
- def _check_proper_bases(self, node):
- """
- Detect that a class inherits something which is not
- a class or a type.
- """
- for base in node.bases:
- ancestor = safe_infer(base)
- if ancestor in (astroid.YES, None):
- continue
- if (isinstance(ancestor, astroid.Instance) and
- ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
- continue
-
- if (not isinstance(ancestor, astroid.ClassDef) or
- _is_invalid_base_class(ancestor)):
- self.add_message('inherit-non-class',
- args=base.as_string(), node=node)
-
- def leave_classdef(self, cnode):
- """close a class node:
- check that instance attributes are defined in __init__ and check
- access to existent members
- """
- # check access to existent members on non metaclass classes
- if self._ignore_mixin and cnode.name[-5:].lower() == 'mixin':
- # We are in a mixin class. No need to try to figure out if
- # something is missing, since it is most likely that it will
- # miss.
- return
-
- accessed = self._accessed.accessed(cnode)
- if cnode.type != 'metaclass':
- self._check_accessed_members(cnode, accessed)
- # checks attributes are defined in an allowed method such as __init__
- if not self.linter.is_message_enabled('attribute-defined-outside-init'):
- return
- defining_methods = self.config.defining_attr_methods
- current_module = cnode.root()
- for attr, nodes in six.iteritems(cnode.instance_attrs):
- # skip nodes which are not in the current module and it may screw up
- # the output, while it's not worth it
- nodes = [n for n in nodes if not
- isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
- and n.root() is current_module]
- if not nodes:
- continue # error detected by typechecking
- # check if any method attr is defined in is a defining method
- if any(node.frame().name in defining_methods
- for node in nodes):
- continue
-
- # check attribute is defined in a parent's __init__
- for parent in cnode.instance_attr_ancestors(attr):
- attr_defined = False
- # check if any parent method attr is defined in is a defining method
- for node in parent.instance_attrs[attr]:
- if node.frame().name in defining_methods:
- attr_defined = True
- if attr_defined:
- # we're done :)
- break
- else:
- # check attribute is defined as a class attribute
- try:
- cnode.local_attr(attr)
- except astroid.NotFoundError:
- for node in nodes:
- if node.frame().name not in defining_methods:
- # If the attribute was set by a callfunc in any
- # of the defining methods, then don't emit
- # the warning.
- if _called_in_methods(node.frame(), cnode,
- defining_methods):
- continue
- self.add_message('attribute-defined-outside-init',
- args=attr, node=node)
-
- def visit_functiondef(self, node):
- """check method arguments, overriding"""
- # ignore actual functions
- if not node.is_method():
- return
-
- self._check_useless_super_delegation(node)
-
- klass = node.parent.frame()
- self._meth_could_be_func = True
- # check first argument is self if this is actually a method
- self._check_first_arg_for_type(node, klass.type == 'metaclass')
- if node.name == '__init__':
- self._check_init(node)
- return
- # check signature if the method overloads inherited method
- for overridden in klass.local_attr_ancestors(node.name):
- # get astroid for the searched method
- try:
- meth_node = overridden[node.name]
- except KeyError:
- # we have found the method but it's not in the local
- # dictionary.
- # This may happen with astroid build from living objects
- continue
- if not isinstance(meth_node, astroid.FunctionDef):
- continue
- self._check_signature(node, meth_node, 'overridden', klass)
- break
- if node.decorators:
- for decorator in node.decorators.nodes:
- if isinstance(decorator, astroid.Attribute) and \
- decorator.attrname in ('getter', 'setter', 'deleter'):
- # attribute affectation will call this method, not hiding it
- return
- if isinstance(decorator, astroid.Name) and decorator.name == 'property':
- # attribute affectation will either call a setter or raise
- # an attribute error, anyway not hiding the function
- return
- # check if the method is hidden by an attribute
- try:
- overridden = klass.instance_attr(node.name)[0] # XXX
- overridden_frame = overridden.frame()
- if (isinstance(overridden_frame, astroid.FunctionDef)
- and overridden_frame.type == 'method'):
- overridden_frame = overridden_frame.parent.frame()
- if (isinstance(overridden_frame, astroid.ClassDef)
- and klass.is_subtype_of(overridden_frame.qname())):
- args = (overridden.root().name, overridden.fromlineno)
- self.add_message('method-hidden', args=args, node=node)
- except astroid.NotFoundError:
- pass
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_useless_super_delegation(self, function):
- '''Check if the given function node is an useless method override
-
- We consider it *useless* if it uses the super() builtin, but having
- nothing additional whatsoever than not implementing the method at all.
- If the method uses super() to delegate an operation to the rest of the MRO,
- and if the method called is the same as the current one, the arguments
- passed to super() are the same as the parameters that were passed to
- this method, then the method could be removed altogether, by letting
- other implementation to take precedence.
- '''
-
- if not function.is_method():
- return
-
- if function.decorators:
- # With decorators is a change of use
- return
-
- body = function.body
- if len(body) != 1:
- # Multiple statements, which means this overridden method
- # could do multiple things we are not aware of.
- return
-
- statement = body[0]
- if not isinstance(statement, (astroid.Expr, astroid.Return)):
- # Doing something else than what we are interested into.
- return
-
- call = statement.value
- if not isinstance(call, astroid.Call):
- return
- if not isinstance(call.func, astroid.Attribute):
- # Not a super() attribute access.
- return
-
- # Should be a super call.
- try:
- super_call = next(call.func.expr.infer())
- except astroid.InferenceError:
- return
- else:
- if not isinstance(super_call, objects.Super):
- return
-
- # The name should be the same.
- if call.func.attrname != function.name:
- return
-
- # Should be a super call with the MRO pointer being the current class
- # and the type being the current instance.
- current_scope = function.parent.scope()
- if super_call.mro_pointer != current_scope:
- return
- if not isinstance(super_call.type, astroid.Instance):
- return
- if super_call.type.name != current_scope.name:
- return
-
- # Detect if the parameters are the same as the call's arguments.
- params = _signature_from_arguments(function.args)
- args = _signature_from_call(call)
- if _definition_equivalent_to_call(params, args):
- self.add_message('useless-super-delegation', node=function,
- args=(function.name, ))
-
- def _check_slots(self, node):
- if '__slots__' not in node.locals:
- return
- for slots in node.igetattr('__slots__'):
- # check if __slots__ is a valid type
- if slots is astroid.YES:
- continue
- if not is_iterable(slots) and not is_comprehension(slots):
- self.add_message('invalid-slots', node=node)
- continue
-
- if isinstance(slots, astroid.Const):
- # a string, ignore the following checks
- self.add_message('single-string-used-for-slots', node=node)
- continue
- if not hasattr(slots, 'itered'):
- # we can't obtain the values, maybe a .deque?
- continue
-
- if isinstance(slots, astroid.Dict):
- values = [item[0] for item in slots.items]
- else:
- values = slots.itered()
- if values is astroid.YES:
- return
-
- for elt in values:
- try:
- self._check_slots_elt(elt)
- except astroid.InferenceError:
- continue
-
- def _check_slots_elt(self, elt):
- for infered in elt.infer():
- if infered is astroid.YES:
- continue
- if (not isinstance(infered, astroid.Const) or
- not isinstance(infered.value, six.string_types)):
- self.add_message('invalid-slots-object',
- args=infered.as_string(),
- node=elt)
- continue
- if not infered.value:
- self.add_message('invalid-slots-object',
- args=infered.as_string(),
- node=elt)
-
- def leave_functiondef(self, node):
- """on method node, check if this method couldn't be a function
-
- ignore class, static and abstract methods, initializer,
- methods overridden from a parent class.
- """
- if node.is_method():
- if node.args.args is not None:
- self._first_attrs.pop()
- if not self.linter.is_message_enabled('no-self-use'):
- return
- class_node = node.parent.frame()
- if (self._meth_could_be_func and node.type == 'method'
- and node.name not in PYMETHODS
- and not (node.is_abstract() or
- overrides_a_method(class_node, node.name) or
- decorated_with_property(node) or
- (six.PY3 and _has_bare_super_call(node)))):
- self.add_message('no-self-use', node=node)
-
- def visit_attribute(self, node):
- """check if the getattr is an access to a class member
- if so, register it. Also check for access to protected
- class member from outside its class (but ignore __special__
- methods)
- """
- # Check self
- if self._uses_mandatory_method_param(node):
- self._accessed.set_accessed(node)
- return
- if not self.linter.is_message_enabled('protected-access'):
- return
-
- self._check_protected_attribute_access(node)
-
- def visit_assignattr(self, node):
- if (isinstance(node.assign_type(), astroid.AugAssign) and
- self._uses_mandatory_method_param(node)):
- self._accessed.set_accessed(node)
- self._check_in_slots(node)
-
- def _check_in_slots(self, node):
- """ Check that the given assattr node
- is defined in the class slots.
- """
- infered = safe_infer(node.expr)
- if infered and isinstance(infered, astroid.Instance):
- klass = infered._proxied
- if '__slots__' not in klass.locals or not klass.newstyle:
- return
-
- slots = klass.slots()
- if slots is None:
- return
- # If any ancestor doesn't use slots, the slots
- # defined for this class are superfluous.
- if any('__slots__' not in ancestor.locals and
- ancestor.name != 'object'
- for ancestor in klass.ancestors()):
- return
-
- if not any(slot.value == node.attrname for slot in slots):
- # If we have a '__dict__' in slots, then
- # assigning any name is valid.
- if not any(slot.value == '__dict__' for slot in slots):
- if _is_attribute_property(node.attrname, klass):
- # Properties circumvent the slots mechanism,
- # so we should not emit a warning for them.
- return
- if (node.attrname in klass.locals
- and _has_data_descriptor(klass, node.attrname)):
- # Descriptors circumvent the slots mechanism as well.
- return
- self.add_message('assigning-non-slot',
- args=(node.attrname, ), node=node)
-
- @check_messages('protected-access', 'no-classmethod-decorator',
- 'no-staticmethod-decorator')
- def visit_assign(self, assign_node):
- self._check_classmethod_declaration(assign_node)
- node = assign_node.targets[0]
- if not isinstance(node, astroid.AssignAttr):
- return
-
- if self._uses_mandatory_method_param(node):
- return
- self._check_protected_attribute_access(node)
-
- def _check_classmethod_declaration(self, node):
- """Checks for uses of classmethod() or staticmethod()
-
- When a @classmethod or @staticmethod decorator should be used instead.
- A message will be emitted only if the assignment is at a class scope
- and only if the classmethod's argument belongs to the class where it
- is defined.
- `node` is an assign node.
- """
- if not isinstance(node.value, astroid.Call):
- return
-
- # check the function called is "classmethod" or "staticmethod"
- func = node.value.func
- if (not isinstance(func, astroid.Name) or
- func.name not in ('classmethod', 'staticmethod')):
- return
-
- msg = ('no-classmethod-decorator' if func.name == 'classmethod' else
- 'no-staticmethod-decorator')
- # assignment must be at a class scope
- parent_class = node.scope()
- if not isinstance(parent_class, astroid.ClassDef):
- return
-
- # Check if the arg passed to classmethod is a class member
- classmeth_arg = node.value.args[0]
- if not isinstance(classmeth_arg, astroid.Name):
- return
-
- method_name = classmeth_arg.name
- if any(method_name == member.name
- for member in parent_class.mymethods()):
- self.add_message(msg, node=node.targets[0])
-
- def _check_protected_attribute_access(self, node):
- '''Given an attribute access node (set or get), check if attribute
- access is legitimate. Call _check_first_attr with node before calling
- this method. Valid cases are:
- * self._attr in a method or cls._attr in a classmethod. Checked by
- _check_first_attr.
- * Klass._attr inside "Klass" class.
- * Klass2._attr inside "Klass" class when Klass2 is a base class of
- Klass.
- '''
- attrname = node.attrname
-
- if (is_attr_protected(attrname) and
- attrname not in self.config.exclude_protected):
-
- klass = node_frame_class(node)
-
- # XXX infer to be more safe and less dirty ??
- # in classes, check we are not getting a parent method
- # through the class object or through super
- callee = node.expr.as_string()
-
- # We are not in a class, no remaining valid case
- if klass is None:
- self.add_message('protected-access', node=node, args=attrname)
- return
-
- # If the expression begins with a call to super, that's ok.
- if isinstance(node.expr, astroid.Call) and \
- isinstance(node.expr.func, astroid.Name) and \
- node.expr.func.name == 'super':
- return
-
- # If the expression begins with a call to type(self), that's ok.
- if self._is_type_self_call(node.expr):
- return
-
- # We are in a class, one remaining valid cases, Klass._attr inside
- # Klass
- if not (callee == klass.name or callee in klass.basenames):
- # Detect property assignments in the body of the class.
- # This is acceptable:
- #
- # class A:
- # b = property(lambda: self._b)
-
- stmt = node.parent.statement()
- if (isinstance(stmt, astroid.Assign)
- and len(stmt.targets) == 1
- and isinstance(stmt.targets[0], astroid.AssignName)):
- name = stmt.targets[0].name
- if _is_attribute_property(name, klass):
- return
-
- self.add_message('protected-access', node=node, args=attrname)
-
- def _is_type_self_call(self, expr):
- return (isinstance(expr, astroid.Call) and
- isinstance(expr.func, astroid.Name) and
- expr.func.name == 'type' and len(expr.args) == 1 and
- self._is_mandatory_method_param(expr.args[0]))
-
- def visit_name(self, node):
- """check if the name handle an access to a class member
- if so, register it
- """
- if self._first_attrs and (node.name == self._first_attrs[-1] or
- not self._first_attrs[-1]):
- self._meth_could_be_func = False
-
- def _check_accessed_members(self, node, accessed):
- """check that accessed members are defined"""
- # XXX refactor, probably much simpler now that E0201 is in type checker
- excs = ('AttributeError', 'Exception', 'BaseException')
- for attr, nodes in six.iteritems(accessed):
- try:
- # is it a class attribute ?
- node.local_attr(attr)
- # yes, stop here
- continue
- except astroid.NotFoundError:
- pass
- # is it an instance attribute of a parent class ?
- try:
- next(node.instance_attr_ancestors(attr))
- # yes, stop here
- continue
- except StopIteration:
- pass
- # is it an instance attribute ?
- try:
- defstmts = node.instance_attr(attr)
- except astroid.NotFoundError:
- pass
- else:
- # filter out augment assignment nodes
- defstmts = [stmt for stmt in defstmts if stmt not in nodes]
- if not defstmts:
- # only augment assignment for this node, no-member should be
- # triggered by the typecheck checker
- continue
- # filter defstmts to only pick the first one when there are
- # several assignments in the same scope
- scope = defstmts[0].scope()
- defstmts = [stmt for i, stmt in enumerate(defstmts)
- if i == 0 or stmt.scope() is not scope]
- # if there are still more than one, don't attempt to be smarter
- # than we can be
- if len(defstmts) == 1:
- defstmt = defstmts[0]
- # check that if the node is accessed in the same method as
- # it's defined, it's accessed after the initial assignment
- frame = defstmt.frame()
- lno = defstmt.fromlineno
- for _node in nodes:
- if _node.frame() is frame and _node.fromlineno < lno \
- and not astroid.are_exclusive(_node.statement(), defstmt, excs):
- self.add_message('access-member-before-definition',
- node=_node, args=(attr, lno))
-
- def _check_first_arg_for_type(self, node, metaclass=0):
- """check the name of first argument, expect:
-
- * 'self' for a regular method
- * 'cls' for a class method or a metaclass regular method (actually
- valid-classmethod-first-arg value)
- * 'mcs' for a metaclass class method (actually
- valid-metaclass-classmethod-first-arg)
- * not one of the above for a static method
- """
- # don't care about functions with unknown argument (builtins)
- if node.args.args is None:
- return
- first_arg = node.args.args and node.argnames()[0]
- self._first_attrs.append(first_arg)
- first = self._first_attrs[-1]
- # static method
- if node.type == 'staticmethod':
- if (first_arg == 'self' or
- first_arg in self.config.valid_classmethod_first_arg or
- first_arg in self.config.valid_metaclass_classmethod_first_arg):
- self.add_message('bad-staticmethod-argument', args=first, node=node)
- return
- self._first_attrs[-1] = None
- # class / regular method with no args
- elif not node.args.args:
- self.add_message('no-method-argument', node=node)
- # metaclass
- elif metaclass:
- # metaclass __new__ or classmethod
- if node.type == 'classmethod':
- self._check_first_arg_config(
- first,
- self.config.valid_metaclass_classmethod_first_arg, node,
- 'bad-mcs-classmethod-argument', node.name)
- # metaclass regular method
- else:
- self._check_first_arg_config(
- first,
- self.config.valid_classmethod_first_arg, node,
- 'bad-mcs-method-argument',
- node.name)
- # regular class
- else:
- # class method
- if node.type == 'classmethod':
- self._check_first_arg_config(
- first,
- self.config.valid_classmethod_first_arg, node,
- 'bad-classmethod-argument',
- node.name)
- # regular method without self as argument
- elif first != 'self':
- self.add_message('no-self-argument', node=node)
-
- def _check_first_arg_config(self, first, config, node, message,
- method_name):
- if first not in config:
- if len(config) == 1:
- valid = repr(config[0])
- else:
- valid = ', '.join(repr(v) for v in config[:-1])
- valid = '%s or %r' % (valid, config[-1])
- self.add_message(message, args=(method_name, valid), node=node)
-
- def _check_bases_classes(self, node):
- """check that the given class node implements abstract methods from
- base classes
- """
- def is_abstract(method):
- return method.is_abstract(pass_is_abstract=False)
-
- # check if this class abstract
- if class_is_abstract(node):
- return
-
- methods = sorted(
- unimplemented_abstract_methods(node, is_abstract).items(),
- key=lambda item: item[0],
- )
- for name, method in methods:
- owner = method.parent.frame()
- if owner is node:
- continue
- # owner is not this class, it must be a parent class
- # check that the ancestor's method is not abstract
- if name in node.locals:
- # it is redefined as an attribute or with a descriptor
- continue
- self.add_message('abstract-method', node=node,
- args=(name, owner.name))
-
- def _check_init(self, node):
- """check that the __init__ method call super or ancestors'__init__
- method
- """
- if (not self.linter.is_message_enabled('super-init-not-called') and
- not self.linter.is_message_enabled('non-parent-init-called')):
- return
- klass_node = node.parent.frame()
- to_call = _ancestors_to_call(klass_node)
- not_called_yet = dict(to_call)
- for stmt in node.nodes_of_class(astroid.Call):
- expr = stmt.func
- if not isinstance(expr, astroid.Attribute) \
- or expr.attrname != '__init__':
- continue
- # skip the test if using super
- if isinstance(expr.expr, astroid.Call) and \
- isinstance(expr.expr.func, astroid.Name) and \
- expr.expr.func.name == 'super':
- return
- try:
- for klass in expr.expr.infer():
- if klass is astroid.YES:
- continue
- # The infered klass can be super(), which was
- # assigned to a variable and the `__init__`
- # was called later.
- #
- # base = super()
- # base.__init__(...)
-
- if (isinstance(klass, astroid.Instance) and
- isinstance(klass._proxied, astroid.ClassDef) and
- is_builtin_object(klass._proxied) and
- klass._proxied.name == 'super'):
- return
- elif isinstance(klass, objects.Super):
- return
- try:
- del not_called_yet[klass]
- except KeyError:
- if klass not in to_call:
- self.add_message('non-parent-init-called',
- node=expr, args=klass.name)
- except astroid.InferenceError:
- continue
- for klass, method in six.iteritems(not_called_yet):
- cls = node_frame_class(method)
- if klass.name == 'object' or (cls and cls.name == 'object'):
- continue
- self.add_message('super-init-not-called', args=klass.name, node=node)
-
- def _check_signature(self, method1, refmethod, class_type, cls):
- """check that the signature of the two given methods match
- """
- if not (isinstance(method1, astroid.FunctionDef)
- and isinstance(refmethod, astroid.FunctionDef)):
- self.add_message('method-check-failed',
- args=(method1, refmethod), node=method1)
- return
-
- instance = cls.instantiate_class()
- method1 = function_to_method(method1, instance)
- refmethod = function_to_method(refmethod, instance)
-
- # Don't care about functions with unknown argument (builtins).
- if method1.args.args is None or refmethod.args.args is None:
- return
-
- # Ignore private to class methods.
- if is_attr_private(method1.name):
- return
- # Ignore setters, they have an implicit extra argument,
- # which shouldn't be taken in consideration.
- if method1.decorators:
- for decorator in method1.decorators.nodes:
- if (isinstance(decorator, astroid.Attribute) and
- decorator.attrname == 'setter'):
- return
-
- if _different_parameters(
- refmethod, method1,
- dummy_parameter_regex=self._dummy_rgx):
- self.add_message('arguments-differ',
- args=(class_type, method1.name),
- node=method1)
- elif len(method1.args.defaults) < len(refmethod.args.defaults):
- self.add_message('signature-differs',
- args=(class_type, method1.name),
- node=method1)
-
- def _uses_mandatory_method_param(self, node):
- """Check that attribute lookup name use first attribute variable name
-
- Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
- """
- return self._is_mandatory_method_param(node.expr)
-
- def _is_mandatory_method_param(self, node):
- """Check if astroid.Name corresponds to first attribute variable name
-
- Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
- """
- return (self._first_attrs and isinstance(node, astroid.Name)
- and node.name == self._first_attrs[-1])
-
-
-class SpecialMethodsChecker(BaseChecker):
- """Checker which verifies that special methods
- are implemented correctly.
- """
- __implements__ = (IAstroidChecker, )
- name = 'classes'
- msgs = {
- 'E0301': ('__iter__ returns non-iterator',
- 'non-iterator-returned',
- 'Used when an __iter__ method returns something which is not an '
- 'iterable (i.e. has no `%s` method)' % NEXT_METHOD,
- {'old_names': [('W0234', 'non-iterator-returned'),
- ('E0234', 'non-iterator-returned')]}),
- 'E0302': ('The special method %r expects %s param(s), %d %s given',
- 'unexpected-special-method-signature',
- 'Emitted when a special method was defined with an '
- 'invalid number of parameters. If it has too few or '
- 'too many, it might not work at all.',
- {'old_names': [('E0235', 'bad-context-manager')]}),
- 'E0303': ('__len__ does not return non-negative integer',
- 'invalid-length-returned',
- 'Used when an __len__ method returns something which is not a '
- 'non-negative integer', {}),
- }
- priority = -2
-
- @check_messages('unexpected-special-method-signature',
- 'non-iterator-returned', 'invalid-length-returned')
- def visit_functiondef(self, node):
- if not node.is_method():
- return
- if node.name == '__iter__':
- self._check_iter(node)
- if node.name == '__len__':
- self._check_len(node)
- if node.name in PYMETHODS:
- self._check_unexpected_method_signature(node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_unexpected_method_signature(self, node):
- expected_params = SPECIAL_METHODS_PARAMS[node.name]
-
- if expected_params is None:
- # This can support a variable number of parameters.
- return
- if not node.args.args and not node.args.vararg:
- # Method has no parameter, will be caught
- # by no-method-argument.
- return
-
- if decorated_with(node, [BUILTINS + ".staticmethod"]):
- # We expect to not take in consideration self.
- all_args = node.args.args
- else:
- all_args = node.args.args[1:]
- mandatory = len(all_args) - len(node.args.defaults)
- optional = len(node.args.defaults)
- current_params = mandatory + optional
-
- if isinstance(expected_params, tuple):
- # The expected number of parameters can be any value from this
- # tuple, although the user should implement the method
- # to take all of them in consideration.
- emit = mandatory not in expected_params
- expected_params = "between %d or %d" % expected_params
- else:
- # If the number of mandatory parameters doesn't
- # suffice, the expected parameters for this
- # function will be deduced from the optional
- # parameters.
- rest = expected_params - mandatory
- if rest == 0:
- emit = False
- elif rest < 0:
- emit = True
- elif rest > 0:
- emit = not ((optional - rest) >= 0 or node.args.vararg)
-
- if emit:
- verb = "was" if current_params <= 1 else "were"
- self.add_message('unexpected-special-method-signature',
- args=(node.name, expected_params, current_params, verb),
- node=node)
-
- @staticmethod
- def _is_iterator(node):
- if node is astroid.YES:
- # Just ignore YES objects.
- return True
- if isinstance(node, Generator):
- # Generators can be itered.
- return True
-
- if isinstance(node, astroid.Instance):
- try:
- node.local_attr(NEXT_METHOD)
- return True
- except astroid.NotFoundError:
- pass
- elif isinstance(node, astroid.ClassDef):
- metaclass = node.metaclass()
- if metaclass and isinstance(metaclass, astroid.ClassDef):
- try:
- metaclass.local_attr(NEXT_METHOD)
- return True
- except astroid.NotFoundError:
- pass
- return False
-
- def _check_iter(self, node):
- infered = _safe_infer_call_result(node, node)
- if infered is not None:
- if not self._is_iterator(infered):
- self.add_message('non-iterator-returned', node=node)
-
- def _check_len(self, node):
- inferred = _safe_infer_call_result(node, node)
- if not inferred:
- return
-
- if not isinstance(inferred, astroid.Const):
- self.add_message('invalid-length-returned', node=node)
- return
-
- value = inferred.value
- if not isinstance(value, six.integer_types) or value < 0:
- self.add_message('invalid-length-returned', node=node)
-
-
-def _ancestors_to_call(klass_node, method='__init__'):
- """return a dictionary where keys are the list of base classes providing
- the queried method, and so that should/may be called from the method node
- """
- to_call = {}
- for base_node in klass_node.ancestors(recurs=False):
- try:
- to_call[base_node] = next(base_node.igetattr(method))
- except astroid.InferenceError:
- continue
- return to_call
-
-
-def node_method(node, method_name):
- """get astroid for on the given class node, ensuring it
- is a Function node
- """
- for node_attr in node.local_attr(method_name):
- if isinstance(node_attr, astroid.Function):
- return node_attr
- raise astroid.NotFoundError(method_name)
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(ClassChecker(linter))
- linter.register_checker(SpecialMethodsChecker(linter))
diff --git a/pymode/libs/pylint/checkers/design_analysis.py b/pymode/libs/pylint/checkers/design_analysis.py
deleted file mode 100644
index c34ec4dc..00000000
--- a/pymode/libs/pylint/checkers/design_analysis.py
+++ /dev/null
@@ -1,334 +0,0 @@
-# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""check for signs of poor design"""
-
-from collections import defaultdict
-
-from astroid import If, BoolOp
-from astroid import decorators
-
-from pylint.interfaces import IAstroidChecker
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import check_messages
-from pylint import utils
-
-
-MSGS = {
- 'R0901': ('Too many ancestors (%s/%s)',
- 'too-many-ancestors',
- 'Used when class has too many parent classes, try to reduce \
- this to get a simpler (and so easier to use) class.'),
- 'R0902': ('Too many instance attributes (%s/%s)',
- 'too-many-instance-attributes',
- 'Used when class has too many instance attributes, try to reduce \
- this to get a simpler (and so easier to use) class.'),
- 'R0903': ('Too few public methods (%s/%s)',
- 'too-few-public-methods',
- 'Used when class has too few public methods, so be sure it\'s \
- really worth it.'),
- 'R0904': ('Too many public methods (%s/%s)',
- 'too-many-public-methods',
- 'Used when class has too many public methods, try to reduce \
- this to get a simpler (and so easier to use) class.'),
-
- 'R0911': ('Too many return statements (%s/%s)',
- 'too-many-return-statements',
- 'Used when a function or method has too many return statement, \
- making it hard to follow.'),
- 'R0912': ('Too many branches (%s/%s)',
- 'too-many-branches',
- 'Used when a function or method has too many branches, \
- making it hard to follow.'),
- 'R0913': ('Too many arguments (%s/%s)',
- 'too-many-arguments',
- 'Used when a function or method takes too many arguments.'),
- 'R0914': ('Too many local variables (%s/%s)',
- 'too-many-locals',
- 'Used when a function or method has too many local variables.'),
- 'R0915': ('Too many statements (%s/%s)',
- 'too-many-statements',
- 'Used when a function or method has too many statements. You \
- should then split it in smaller functions / methods.'),
- 'R0916': ('Too many boolean expressions in if statement (%s/%s)',
- 'too-many-boolean-expressions',
- 'Used when a if statement contains too many boolean '
- 'expressions'),
- }
-
-
-def _count_boolean_expressions(bool_op):
- """Counts the number of boolean expressions in BoolOp `bool_op` (recursive)
-
- example: a and (b or c or (d and e)) ==> 5 boolean expressions
- """
- nb_bool_expr = 0
- for bool_expr in bool_op.get_children():
- if isinstance(bool_expr, BoolOp):
- nb_bool_expr += _count_boolean_expressions(bool_expr)
- else:
- nb_bool_expr += 1
- return nb_bool_expr
-
-
-class MisdesignChecker(BaseChecker):
- """checks for sign of poor/misdesign:
- * number of methods, attributes, local variables...
- * size, complexity of functions, methods
- """
-
- __implements__ = (IAstroidChecker,)
-
- # configuration section name
- name = 'design'
- # messages
- msgs = MSGS
- priority = -2
- # configuration options
- options = (('max-args',
- {'default' : 5, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of arguments for function / method'}
- ),
- ('max-locals',
- {'default' : 15, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of locals for function / method body'}
- ),
- ('max-returns',
- {'default' : 6, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of return / yield for function / '
- 'method body'}
- ),
- ('max-branches',
- {'default' : 12, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of branch for function / method body'}
- ),
- ('max-statements',
- {'default' : 50, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of statements in function / method '
- 'body'}
- ),
- ('max-parents',
- {'default' : 7,
- 'type' : 'int',
- 'metavar' : '',
- 'help' : 'Maximum number of parents for a class (see R0901).'}
- ),
- ('max-attributes',
- {'default' : 7,
- 'type' : 'int',
- 'metavar' : '',
- 'help' : 'Maximum number of attributes for a class \
-(see R0902).'}
- ),
- ('min-public-methods',
- {'default' : 2,
- 'type' : 'int',
- 'metavar' : '',
- 'help' : 'Minimum number of public methods for a class \
-(see R0903).'}
- ),
- ('max-public-methods',
- {'default' : 20,
- 'type' : 'int',
- 'metavar' : '',
- 'help' : 'Maximum number of public methods for a class \
-(see R0904).'}
- ),
- ('max-bool-expr',
- {'default': 5,
- 'type': 'int',
- 'metavar': '',
- 'help': 'Maximum number of boolean expressions in a if '
- 'statement'}
- ),
- )
-
- def __init__(self, linter=None):
- BaseChecker.__init__(self, linter)
- self.stats = None
- self._returns = None
- self._branches = None
- self._stmts = 0
-
- def open(self):
- """initialize visit variables"""
- self.stats = self.linter.add_stats()
- self._returns = []
- self._branches = defaultdict(int)
-
- @decorators.cachedproperty
- def _ignored_argument_names(self):
- return utils.get_global_option(self, 'ignored-argument-names', default=None)
-
- @check_messages('too-many-ancestors', 'too-many-instance-attributes',
- 'too-few-public-methods', 'too-many-public-methods')
- def visit_classdef(self, node):
- """check size of inheritance hierarchy and number of instance attributes
- """
- nb_parents = len(list(node.ancestors()))
- if nb_parents > self.config.max_parents:
- self.add_message('too-many-ancestors', node=node,
- args=(nb_parents, self.config.max_parents))
-
- if len(node.instance_attrs) > self.config.max_attributes:
- self.add_message('too-many-instance-attributes', node=node,
- args=(len(node.instance_attrs),
- self.config.max_attributes))
-
- @check_messages('too-few-public-methods', 'too-many-public-methods')
- def leave_classdef(self, node):
- """check number of public methods"""
- my_methods = sum(1 for method in node.mymethods()
- if not method.name.startswith('_'))
- all_methods = sum(1 for method in node.methods()
- if not method.name.startswith('_'))
-
- # Does the class contain less than n public methods ?
- # This checks only the methods defined in the current class,
- # since the user might not have control over the classes
- # from the ancestors. It avoids some false positives
- # for classes such as unittest.TestCase, which provides
- # a lot of assert methods. It doesn't make sense to warn
- # when the user subclasses TestCase to add his own tests.
- if my_methods > self.config.max_public_methods:
- self.add_message('too-many-public-methods', node=node,
- args=(my_methods,
- self.config.max_public_methods))
- # stop here for exception, metaclass and interface classes
- if node.type != 'class':
- return
-
- # Does the class contain more than n public methods ?
- # This checks all the methods defined by ancestors and
- # by the current class.
- if all_methods < self.config.min_public_methods:
- self.add_message('too-few-public-methods', node=node,
- args=(all_methods,
- self.config.min_public_methods))
-
- @check_messages('too-many-return-statements', 'too-many-branches',
- 'too-many-arguments', 'too-many-locals',
- 'too-many-statements')
- def visit_functiondef(self, node):
- """check function name, docstring, arguments, redefinition,
- variable names, max locals
- """
- # init branch and returns counters
- self._returns.append(0)
- # check number of arguments
- args = node.args.args
- ignored_argument_names = self._ignored_argument_names
- if args is not None:
- ignored_args_num = 0
- if ignored_argument_names:
- ignored_args_num = sum(1 for arg in args if ignored_argument_names.match(arg.name))
-
- argnum = len(args) - ignored_args_num
- if argnum > self.config.max_args:
- self.add_message('too-many-arguments', node=node,
- args=(len(args), self.config.max_args))
- else:
- ignored_args_num = 0
- # check number of local variables
- locnum = len(node.locals) - ignored_args_num
- if locnum > self.config.max_locals:
- self.add_message('too-many-locals', node=node,
- args=(locnum, self.config.max_locals))
- # init statements counter
- self._stmts = 1
-
- visit_asyncfunctiondef = visit_functiondef
-
- @check_messages('too-many-return-statements', 'too-many-branches',
- 'too-many-arguments', 'too-many-locals',
- 'too-many-statements')
- def leave_functiondef(self, node):
- """most of the work is done here on close:
- checks for max returns, branch, return in __init__
- """
- returns = self._returns.pop()
- if returns > self.config.max_returns:
- self.add_message('too-many-return-statements', node=node,
- args=(returns, self.config.max_returns))
- branches = self._branches[node]
- if branches > self.config.max_branches:
- self.add_message('too-many-branches', node=node,
- args=(branches, self.config.max_branches))
- # check number of statements
- if self._stmts > self.config.max_statements:
- self.add_message('too-many-statements', node=node,
- args=(self._stmts, self.config.max_statements))
-
- leave_asyncfunctiondef = leave_functiondef
-
- def visit_return(self, _):
- """count number of returns"""
- if not self._returns:
- return # return outside function, reported by the base checker
- self._returns[-1] += 1
-
- def visit_default(self, node):
- """default visit method -> increments the statements counter if
- necessary
- """
- if node.is_statement:
- self._stmts += 1
-
- def visit_tryexcept(self, node):
- """increments the branches counter"""
- branches = len(node.handlers)
- if node.orelse:
- branches += 1
- self._inc_branch(node, branches)
- self._stmts += branches
-
- def visit_tryfinally(self, node):
- """increments the branches counter"""
- self._inc_branch(node, 2)
- self._stmts += 2
-
- @check_messages('too-many-boolean-expressions')
- def visit_if(self, node):
- """increments the branches counter and checks boolean expressions"""
- self._check_boolean_expressions(node)
- branches = 1
- # don't double count If nodes coming from some 'elif'
- if node.orelse and (len(node.orelse) > 1 or
- not isinstance(node.orelse[0], If)):
- branches += 1
- self._inc_branch(node, branches)
- self._stmts += branches
-
- def _check_boolean_expressions(self, node):
- """Go through "if" node `node` and counts its boolean expressions
-
- if the "if" node test is a BoolOp node
- """
- condition = node.test
- if not isinstance(condition, BoolOp):
- return
- nb_bool_expr = _count_boolean_expressions(condition)
- if nb_bool_expr > self.config.max_bool_expr:
- self.add_message('too-many-boolean-expressions', node=condition,
- args=(nb_bool_expr, self.config.max_bool_expr))
-
- def visit_while(self, node):
- """increments the branches counter"""
- branches = 1
- if node.orelse:
- branches += 1
- self._inc_branch(node, branches)
-
- visit_for = visit_while
-
- def _inc_branch(self, node, branchesnum=1):
- """increments the branches counter"""
- self._branches[node.scope()] += branchesnum
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(MisdesignChecker(linter))
diff --git a/pymode/libs/pylint/checkers/exceptions.py b/pymode/libs/pylint/checkers/exceptions.py
deleted file mode 100644
index dde3ae62..00000000
--- a/pymode/libs/pylint/checkers/exceptions.py
+++ /dev/null
@@ -1,389 +0,0 @@
-# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2011, 2013-2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-# Copyright (c) 2015 Steven Myint
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checks for various exception related errors."""
-
-import inspect
-import sys
-
-import six
-from six.moves import builtins
-
-import astroid
-from pylint import checkers
-from pylint.checkers import utils
-from pylint import interfaces
-
-
-def _builtin_exceptions():
- def predicate(obj):
- return isinstance(obj, type) and issubclass(obj, BaseException)
-
- members = inspect.getmembers(six.moves.builtins, predicate)
- return {exc.__name__ for (_, exc) in members}
-
-
-def _annotated_unpack_infer(stmt, context=None):
- """
- Recursively generate nodes inferred by the given statement.
- If the inferred value is a list or a tuple, recurse on the elements.
- Returns an iterator which yields tuples in the format
- ('original node', 'infered node').
- """
- if isinstance(stmt, (astroid.List, astroid.Tuple)):
- for elt in stmt.elts:
- inferred = utils.safe_infer(elt)
- if inferred and inferred is not astroid.YES:
- yield elt, inferred
- return
- for infered in stmt.infer(context):
- if infered is astroid.YES:
- continue
- yield stmt, infered
-
-
-PY3K = sys.version_info >= (3, 0)
-OVERGENERAL_EXCEPTIONS = ('Exception',)
-BUILTINS_NAME = builtins.__name__
-
-MSGS = {
- 'E0701': ('Bad except clauses order (%s)',
- 'bad-except-order',
- 'Used when except clauses are not in the correct order (from the '
- 'more specific to the more generic). If you don\'t fix the order, '
- 'some exceptions may not be caught by the most specific handler.'),
- 'E0702': ('Raising %s while only classes or instances are allowed',
- 'raising-bad-type',
- 'Used when something which is neither a class, an instance or a \
- string is raised (i.e. a `TypeError` will be raised).'),
- 'E0703': ('Exception context set to something which is not an '
- 'exception, nor None',
- 'bad-exception-context',
- 'Used when using the syntax "raise ... from ...", '
- 'where the exception context is not an exception, '
- 'nor None.',
- {'minversion': (3, 0)}),
- 'E0704': ('The raise statement is not inside an except clause',
- 'misplaced-bare-raise',
- 'Used when a bare raise is not used inside an except clause. '
- 'This generates an error, since there are no active exceptions '
- 'to be reraised. An exception to this rule is represented by '
- 'a bare raise inside a finally clause, which might work, as long '
- 'as an exception is raised inside the try block, but it is '
- 'nevertheless a code smell that must not be relied upon.'),
- 'E0710': ('Raising a new style class which doesn\'t inherit from BaseException',
- 'raising-non-exception',
- 'Used when a new style class which doesn\'t inherit from \
- BaseException is raised.'),
- 'E0711': ('NotImplemented raised - should raise NotImplementedError',
- 'notimplemented-raised',
- 'Used when NotImplemented is raised instead of \
- NotImplementedError'),
- 'E0712': ('Catching an exception which doesn\'t inherit from Exception: %s',
- 'catching-non-exception',
- 'Used when a class which doesn\'t inherit from \
- Exception is used as an exception in an except clause.'),
- 'W0702': ('No exception type(s) specified',
- 'bare-except',
- 'Used when an except clause doesn\'t specify exceptions type to \
- catch.'),
- 'W0703': ('Catching too general exception %s',
- 'broad-except',
- 'Used when an except catches a too general exception, \
- possibly burying unrelated errors.'),
- 'W0705': ('Catching previously caught exception type %s',
- 'duplicate-except',
- 'Used when an except catches a type that was already caught by '
- 'a previous handler.'),
- 'W0710': ('Exception doesn\'t inherit from standard "Exception" class',
- 'nonstandard-exception',
- 'Used when a custom exception class is raised but doesn\'t \
- inherit from the builtin "Exception" class.',
- {'maxversion': (3, 0)}),
- 'W0711': ('Exception to catch is the result of a binary "%s" operation',
- 'binary-op-exception',
- 'Used when the exception to catch is of the form \
- "except A or B:". If intending to catch multiple, \
- rewrite as "except (A, B):"'),
- }
-
-
-class BaseVisitor(object):
- """Base class for visitors defined in this module."""
-
- def __init__(self, checker, node):
- self._checker = checker
- self._node = node
-
- def visit(self, node):
- name = node.__class__.__name__.lower()
- dispatch_meth = getattr(self, 'visit_' + name, None)
- if dispatch_meth:
- dispatch_meth(node)
- else:
- self.visit_default(node)
-
- def visit_default(self, node): # pylint: disable=unused-argument
- """Default implementation for all the nodes."""
-
-
-class ExceptionRaiseRefVisitor(BaseVisitor):
- """Visit references (anything that is not an AST leaf)."""
-
- def visit_name(self, name):
- if name.name == 'NotImplemented':
- self._checker.add_message(
- 'notimplemented-raised',
- node=self._node)
-
- def visit_call(self, call):
- if isinstance(call.func, astroid.Name):
- self.visit_name(call.func)
-
-
-class ExceptionRaiseLeafVisitor(BaseVisitor):
- """Visitor for handling leaf kinds of a raise value."""
-
- def visit_const(self, const):
- if not isinstance(const.value, str):
- # raising-string will be emitted from python3 porting checker.
- self._checker.add_message('raising-bad-type', node=self._node,
- args=const.value.__class__.__name__)
-
- def visit_instance(self, instance):
- # pylint: disable=protected-access
- cls = instance._proxied
- self.visit_classdef(cls)
-
- # Exception instances have a particular class type
- visit_exceptioninstance = visit_instance
-
- def visit_classdef(self, cls):
- if (not utils.inherit_from_std_ex(cls) and
- utils.has_known_bases(cls)):
- if cls.newstyle:
- self._checker.add_message('raising-non-exception', node=self._node)
- else:
- self._checker.add_message('nonstandard-exception', node=self._node)
-
- def visit_tuple(self, tuple_node):
- if PY3K or not tuple_node.elts:
- self._checker.add_message('raising-bad-type',
- node=self._node,
- args='tuple')
- return
-
- # On Python 2, using the following is not an error:
- # raise (ZeroDivisionError, None)
- # raise (ZeroDivisionError, )
- # What's left to do is to check that the first
- # argument is indeed an exception. Verifying the other arguments
- # is not the scope of this check.
- first = tuple_node.elts[0]
- inferred = utils.safe_infer(first)
- if not inferred or inferred is astroid.Uninferable:
- return
-
- if (isinstance(inferred, astroid.Instance)
- and inferred.__class__.__name__ != 'Instance'):
- # TODO: explain why
- self.visit_default(tuple_node)
- else:
- self.visit(inferred)
-
- def visit_default(self, node):
- name = getattr(node, 'name', node.__class__.__name__)
- self._checker.add_message('raising-bad-type',
- node=self._node,
- args=name)
-
-
-class ExceptionsChecker(checkers.BaseChecker):
- """Exception related checks."""
-
- __implements__ = interfaces.IAstroidChecker
-
- name = 'exceptions'
- msgs = MSGS
- priority = -4
- options = (('overgeneral-exceptions',
- {'default' : OVERGENERAL_EXCEPTIONS,
- 'type' : 'csv', 'metavar' : '',
- 'help' : 'Exceptions that will emit a warning '
- 'when being caught. Defaults to "%s"' % (
- ', '.join(OVERGENERAL_EXCEPTIONS),)}
- ),
- )
-
- def open(self):
- self._builtin_exceptions = _builtin_exceptions()
- super(ExceptionsChecker, self).open()
-
- @utils.check_messages('nonstandard-exception', 'misplaced-bare-raise',
- 'raising-bad-type', 'raising-non-exception',
- 'notimplemented-raised', 'bad-exception-context')
- def visit_raise(self, node):
- if node.exc is None:
- self._check_misplaced_bare_raise(node)
- return
-
- if PY3K and node.cause:
- self._check_bad_exception_context(node)
-
- expr = node.exc
- try:
- inferred_value = next(expr.infer())
- except astroid.InferenceError:
- inferred_value = None
-
- ExceptionRaiseRefVisitor(self, node).visit(expr)
-
- if inferred_value:
- ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)
-
- def _check_misplaced_bare_raise(self, node):
- # Filter out if it's present in __exit__.
- scope = node.scope()
- if (isinstance(scope, astroid.FunctionDef)
- and scope.is_method()
- and scope.name == '__exit__'):
- return
-
- current = node
- # Stop when a new scope is generated or when the raise
- # statement is found inside a TryFinally.
- ignores = (astroid.ExceptHandler, astroid.FunctionDef, astroid.TryFinally)
- while current and not isinstance(current.parent, ignores):
- current = current.parent
-
- expected = (astroid.ExceptHandler,)
- if not current or not isinstance(current.parent, expected):
- self.add_message('misplaced-bare-raise', node=node)
-
- def _check_bad_exception_context(self, node):
- """Verify that the exception context is properly set.
-
- An exception context can be only `None` or an exception.
- """
- cause = utils.safe_infer(node.cause)
- if cause in (astroid.YES, None):
- return
-
- if isinstance(cause, astroid.Const):
- if cause.value is not None:
- self.add_message('bad-exception-context',
- node=node)
- elif (not isinstance(cause, astroid.ClassDef) and
- not utils.inherit_from_std_ex(cause)):
- self.add_message('bad-exception-context',
- node=node)
-
- def _check_catching_non_exception(self, handler, exc, part):
- if isinstance(exc, astroid.Tuple):
- # Check if it is a tuple of exceptions.
- inferred = [utils.safe_infer(elt) for elt in exc.elts]
- if any(node is astroid.YES for node in inferred):
- # Don't emit if we don't know every component.
- return
- if all(node and utils.inherit_from_std_ex(node)
- for node in inferred):
- return
-
- if not isinstance(exc, astroid.ClassDef):
- # Don't emit the warning if the infered stmt
- # is None, but the exception handler is something else,
- # maybe it was redefined.
- if (isinstance(exc, astroid.Const) and
- exc.value is None):
- if ((isinstance(handler.type, astroid.Const) and
- handler.type.value is None) or
- handler.type.parent_of(exc)):
- # If the exception handler catches None or
- # the exception component, which is None, is
- # defined by the entire exception handler, then
- # emit a warning.
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
- else:
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
- return
-
- if (not utils.inherit_from_std_ex(exc) and
- exc.name not in self._builtin_exceptions):
- if utils.has_known_bases(exc):
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(exc.name, ))
-
- @utils.check_messages('bare-except', 'broad-except',
- 'binary-op-exception', 'bad-except-order',
- 'catching-non-exception', 'duplicate-except')
- def visit_tryexcept(self, node):
- """check for empty except"""
- exceptions_classes = []
- nb_handlers = len(node.handlers)
- for index, handler in enumerate(node.handlers):
- if handler.type is None:
- if not utils.is_raising(handler.body):
- self.add_message('bare-except', node=handler)
- # check if a "except:" is followed by some other
- # except
- if index < (nb_handlers - 1):
- msg = 'empty except clause should always appear last'
- self.add_message('bad-except-order', node=node, args=msg)
-
- elif isinstance(handler.type, astroid.BoolOp):
- self.add_message('binary-op-exception',
- node=handler, args=handler.type.op)
- else:
- try:
- excs = list(_annotated_unpack_infer(handler.type))
- except astroid.InferenceError:
- continue
-
- for part, exc in excs:
- if exc is astroid.YES:
- continue
- if (isinstance(exc, astroid.Instance)
- and utils.inherit_from_std_ex(exc)):
- # pylint: disable=protected-access
- exc = exc._proxied
-
- self._check_catching_non_exception(handler, exc, part)
-
- if not isinstance(exc, astroid.ClassDef):
- continue
-
- exc_ancestors = [anc for anc in exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
-
- for previous_exc in exceptions_classes:
- if previous_exc in exc_ancestors:
- msg = '%s is an ancestor class of %s' % (
- previous_exc.name, exc.name)
- self.add_message('bad-except-order',
- node=handler.type, args=msg)
- if (exc.name in self.config.overgeneral_exceptions
- and exc.root().name == utils.EXCEPTIONS_MODULE
- and not utils.is_raising(handler.body)):
- self.add_message('broad-except',
- args=exc.name, node=handler.type)
-
- if exc in exceptions_classes:
- self.add_message('duplicate-except',
- args=exc.name, node=handler.type)
-
- exceptions_classes += [exc for _, exc in excs]
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(ExceptionsChecker(linter))
diff --git a/pymode/libs/pylint/checkers/format.py b/pymode/libs/pylint/checkers/format.py
deleted file mode 100644
index 691b41bc..00000000
--- a/pymode/libs/pylint/checkers/format.py
+++ /dev/null
@@ -1,1069 +0,0 @@
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2015 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2014 Michal Nowikowski
-# Copyright (c) 2015 Mike Frysinger
-# Copyright (c) 2015 Mihai Balint
-# Copyright (c) 2015 Fabio Natali
-# Copyright (c) 2015 Harut
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Python code format's checker.
-
-By default try to follow Guido's style guide :
-
-http://www.python.org/doc/essays/styleguide.html
-
-Some parts of the process_token method is based from The Tab Nanny std module.
-"""
-
-import keyword
-import sys
-import tokenize
-from functools import reduce # pylint: disable=redefined-builtin
-
-import six
-from six.moves import zip, map, filter # pylint: disable=redefined-builtin
-
-from astroid import nodes
-
-from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
-from pylint.checkers import BaseTokenChecker
-from pylint.checkers.utils import check_messages
-from pylint.utils import WarningScope, OPTION_RGX
-
-_ASYNC_TOKEN = 'async'
-_CONTINUATION_BLOCK_OPENERS = ['elif', 'except', 'for', 'if', 'while', 'def', 'class']
-_KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not',
- 'raise', 'return', 'while', 'yield']
-if sys.version_info < (3, 0):
- _KEYWORD_TOKENS.append('print')
-
-_SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=',
- '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=',
- '%=', '>>=', '<<=']
-_OPENING_BRACKETS = ['(', '[', '{']
-_CLOSING_BRACKETS = [')', ']', '}']
-_TAB_LENGTH = 8
-
-_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
-_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)
-
-# Whitespace checking policy constants
-_MUST = 0
-_MUST_NOT = 1
-_IGNORE = 2
-
-# Whitespace checking config constants
-_DICT_SEPARATOR = 'dict-separator'
-_TRAILING_COMMA = 'trailing-comma'
-_EMPTY_LINE = 'empty-line'
-_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR, _EMPTY_LINE]
-_DEFAULT_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
-
-MSGS = {
- 'C0301': ('Line too long (%s/%s)',
- 'line-too-long',
- 'Used when a line is longer than a given number of characters.'),
- 'C0302': ('Too many lines in module (%s/%s)', # was W0302
- 'too-many-lines',
- 'Used when a module has too much lines, reducing its readability.'
- ),
- 'C0303': ('Trailing whitespace',
- 'trailing-whitespace',
- 'Used when there is whitespace between the end of a line and the '
- 'newline.'),
- 'C0304': ('Final newline missing',
- 'missing-final-newline',
- 'Used when the last line in a file is missing a newline.'),
- 'C0305': ('Trailing newlines',
- 'trailing-newlines',
- 'Used when there are trailing blank lines in a file.'),
- 'W0311': ('Bad indentation. Found %s %s, expected %s',
- 'bad-indentation',
- 'Used when an unexpected number of indentation\'s tabulations or '
- 'spaces has been found.'),
- 'C0330': ('Wrong %s indentation%s%s.\n%s%s',
- 'bad-continuation',
- 'TODO'),
- 'W0312': ('Found indentation with %ss instead of %ss',
- 'mixed-indentation',
- 'Used when there are some mixed tabs and spaces in a module.'),
- 'W0301': ('Unnecessary semicolon', # was W0106
- 'unnecessary-semicolon',
- 'Used when a statement is ended by a semi-colon (";"), which \
- isn\'t necessary (that\'s python, not C ;).'),
- 'C0321': ('More than one statement on a single line',
- 'multiple-statements',
- 'Used when more than on statement are found on the same line.',
- {'scope': WarningScope.NODE}),
- 'C0325' : ('Unnecessary parens after %r keyword',
- 'superfluous-parens',
- 'Used when a single item in parentheses follows an if, for, or '
- 'other keyword.'),
- 'C0326': ('%s space %s %s %s\n%s',
- 'bad-whitespace',
- ('Used when a wrong number of spaces is used around an operator, '
- 'bracket or block opener.'),
- {'old_names': [('C0323', 'no-space-after-operator'),
- ('C0324', 'no-space-after-comma'),
- ('C0322', 'no-space-before-operator')]}),
- 'W0332': ('Use of "l" as long integer identifier',
- 'lowercase-l-suffix',
- 'Used when a lower case "l" is used to mark a long integer. You '
- 'should use a upper case "L" since the letter "l" looks too much '
- 'like the digit "1"',
- {'maxversion': (3, 0)}),
- 'C0327': ('Mixed line endings LF and CRLF',
- 'mixed-line-endings',
- 'Used when there are mixed (LF and CRLF) newline signs in a file.'),
- 'C0328': ('Unexpected line ending format. There is \'%s\' while it should be \'%s\'.',
- 'unexpected-line-ending-format',
- 'Used when there is different newline than expected.'),
- }
-
-
-def _underline_token(token):
- length = token[3][1] - token[2][1]
- offset = token[2][1]
- referenced_line = token[4]
- # If the referenced line does not end with a newline char, fix it
- if referenced_line[-1] != '\n':
- referenced_line += '\n'
- return referenced_line + (' ' * offset) + ('^' * length)
-
-def _column_distance(token1, token2):
- if token1 == token2:
- return 0
- if token2[3] < token1[3]:
- token1, token2 = token2, token1
- if token1[3][0] != token2[2][0]:
- return None
- return token2[2][1] - token1[3][1]
-
-
-def _last_token_on_line_is(tokens, line_end, token):
- return (line_end > 0 and tokens.token(line_end-1) == token or
- line_end > 1 and tokens.token(line_end-2) == token
- and tokens.type(line_end-1) == tokenize.COMMENT)
-
-
-def _token_followed_by_eol(tokens, position):
- return (tokens.type(position+1) == tokenize.NL or
- tokens.type(position+1) == tokenize.COMMENT and
- tokens.type(position+2) == tokenize.NL)
-
-
-def _get_indent_length(line):
- """Return the length of the indentation on the given token's line."""
- result = 0
- for char in line:
- if char == ' ':
- result += 1
- elif char == '\t':
- result += _TAB_LENGTH
- else:
- break
- return result
-
-
-def _get_indent_hint_line(bar_positions, bad_position):
- """Return a line with |s for each of the positions in the given lists."""
- if not bar_positions:
- return ('', '')
- delta_message = ''
- markers = [(pos, '|') for pos in bar_positions]
- if len(markers) == 1:
- # if we have only one marker we'll provide an extra hint on how to fix
- expected_position = markers[0][0]
- delta = abs(expected_position - bad_position)
- direction = 'add' if expected_position > bad_position else 'remove'
- delta_message = _CONTINUATION_HINT_MESSAGE % (
- direction, delta, 's' if delta > 1 else '')
- markers.append((bad_position, '^'))
- markers.sort()
- line = [' '] * (markers[-1][0] + 1)
- for position, marker in markers:
- line[position] = marker
- return (''.join(line), delta_message)
-
-
-class _ContinuedIndent(object):
- __slots__ = ('valid_outdent_offsets',
- 'valid_continuation_offsets',
- 'context_type',
- 'token',
- 'position')
-
- def __init__(self,
- context_type,
- token,
- position,
- valid_outdent_offsets,
- valid_continuation_offsets):
- self.valid_outdent_offsets = valid_outdent_offsets
- self.valid_continuation_offsets = valid_continuation_offsets
- self.context_type = context_type
- self.position = position
- self.token = token
-
-
-# The contexts for hanging indents.
-# A hanging indented dictionary value after :
-HANGING_DICT_VALUE = 'dict-value'
-# Hanging indentation in an expression.
-HANGING = 'hanging'
-# Hanging indentation in a block header.
-HANGING_BLOCK = 'hanging-block'
-# Continued indentation inside an expression.
-CONTINUED = 'continued'
-# Continued indentation in a block header.
-CONTINUED_BLOCK = 'continued-block'
-
-SINGLE_LINE = 'single'
-WITH_BODY = 'multi'
-
-_CONTINUATION_MSG_PARTS = {
- HANGING_DICT_VALUE: ('hanging', ' in dict value'),
- HANGING: ('hanging', ''),
- HANGING_BLOCK: ('hanging', ' before block'),
- CONTINUED: ('continued', ''),
- CONTINUED_BLOCK: ('continued', ' before block'),
-}
-
-_CONTINUATION_HINT_MESSAGE = ' (%s %d space%s)' # Ex: (remove 2 spaces)
-
-def _Offsets(*args):
- """Valid indentation offsets for a continued line."""
- return dict((a, None) for a in args)
-
-
-def _BeforeBlockOffsets(single, with_body):
- """Valid alternative indent offsets for continued lines before blocks.
-
- :param int single: Valid offset for statements on a single logical line.
- :param int with_body: Valid offset for statements on several lines.
-
- :returns: A dictionary mapping indent offsets to a string representing
- whether the indent if for a line or block.
- :rtype: dict
- """
- return {single: SINGLE_LINE, with_body: WITH_BODY}
-
-
-class TokenWrapper(object):
- """A wrapper for readable access to token information."""
-
- def __init__(self, tokens):
- self._tokens = tokens
-
- def token(self, idx):
- return self._tokens[idx][1]
-
- def type(self, idx):
- return self._tokens[idx][0]
-
- def start_line(self, idx):
- return self._tokens[idx][2][0]
-
- def start_col(self, idx):
- return self._tokens[idx][2][1]
-
- def line(self, idx):
- return self._tokens[idx][4]
-
-
-class ContinuedLineState(object):
- """Tracker for continued indentation inside a logical line."""
-
- def __init__(self, tokens, config):
- self._line_start = -1
- self._cont_stack = []
- self._is_block_opener = False
- self.retained_warnings = []
- self._config = config
- self._tokens = TokenWrapper(tokens)
-
- @property
- def has_content(self):
- return bool(self._cont_stack)
-
- @property
- def _block_indent_size(self):
- return len(self._config.indent_string.replace('\t', ' ' * _TAB_LENGTH))
-
- @property
- def _continuation_size(self):
- return self._config.indent_after_paren
-
- def handle_line_start(self, pos):
- """Record the first non-junk token at the start of a line."""
- if self._line_start > -1:
- return
-
- check_token_position = pos
- if self._tokens.token(pos) == _ASYNC_TOKEN:
- check_token_position += 1
- self._is_block_opener = self._tokens.token(
- check_token_position
- ) in _CONTINUATION_BLOCK_OPENERS
- self._line_start = pos
-
- def next_physical_line(self):
- """Prepares the tracker for a new physical line (NL)."""
- self._line_start = -1
- self._is_block_opener = False
-
- def next_logical_line(self):
- """Prepares the tracker for a new logical line (NEWLINE).
-
- A new logical line only starts with block indentation.
- """
- self.next_physical_line()
- self.retained_warnings = []
- self._cont_stack = []
-
- def add_block_warning(self, token_position, state, valid_offsets):
- self.retained_warnings.append((token_position, state, valid_offsets))
-
- def get_valid_offsets(self, idx):
- """Returns the valid offsets for the token at the given position."""
- # The closing brace on a dict or the 'for' in a dict comprehension may
- # reset two indent levels because the dict value is ended implicitly
- stack_top = -1
- if self._tokens.token(idx) in ('}', 'for') and self._cont_stack[-1].token == ':':
- stack_top = -2
- indent = self._cont_stack[stack_top]
- if self._tokens.token(idx) in _CLOSING_BRACKETS:
- valid_offsets = indent.valid_outdent_offsets
- else:
- valid_offsets = indent.valid_continuation_offsets
- return indent, valid_offsets.copy()
-
- def _hanging_indent_after_bracket(self, bracket, position):
- """Extracts indentation information for a hanging indent."""
- indentation = _get_indent_length(self._tokens.line(position))
- if self._is_block_opener and self._continuation_size == self._block_indent_size:
- return _ContinuedIndent(
- HANGING_BLOCK,
- bracket,
- position,
- _Offsets(indentation + self._continuation_size, indentation),
- _BeforeBlockOffsets(indentation + self._continuation_size,
- indentation + self._continuation_size * 2))
- if bracket == ':':
- # If the dict key was on the same line as the open brace, the new
- # correct indent should be relative to the key instead of the
- # current indent level
- paren_align = self._cont_stack[-1].valid_outdent_offsets
- next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
- next_align_keys = list(next_align.keys())
- next_align[next_align_keys[0] + self._continuation_size] = True
- # Note that the continuation of
- # d = {
- # 'a': 'b'
- # 'c'
- # }
- # is handled by the special-casing for hanging continued string indents.
- return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align)
- return _ContinuedIndent(
- HANGING,
- bracket,
- position,
- _Offsets(indentation, indentation + self._continuation_size),
- _Offsets(indentation + self._continuation_size))
-
- def _continuation_inside_bracket(self, bracket, pos):
- """Extracts indentation information for a continued indent."""
- indentation = _get_indent_length(self._tokens.line(pos))
- token_start = self._tokens.start_col(pos)
- next_token_start = self._tokens.start_col(pos + 1)
- if self._is_block_opener and next_token_start - indentation == self._block_indent_size:
- return _ContinuedIndent(
- CONTINUED_BLOCK,
- bracket,
- pos,
- _Offsets(token_start),
- _BeforeBlockOffsets(next_token_start, next_token_start + self._continuation_size))
- return _ContinuedIndent(
- CONTINUED,
- bracket,
- pos,
- _Offsets(token_start),
- _Offsets(next_token_start))
-
- def pop_token(self):
- self._cont_stack.pop()
-
- def push_token(self, token, position):
- """Pushes a new token for continued indentation on the stack.
-
- Tokens that can modify continued indentation offsets are:
- * opening brackets
- * 'lambda'
- * : inside dictionaries
-
- push_token relies on the caller to filter out those
- interesting tokens.
-
- :param int token: The concrete token
- :param int position: The position of the token in the stream.
- """
- if _token_followed_by_eol(self._tokens, position):
- self._cont_stack.append(
- self._hanging_indent_after_bracket(token, position))
- else:
- self._cont_stack.append(
- self._continuation_inside_bracket(token, position))
-
-
-class FormatChecker(BaseTokenChecker):
- """checks for :
- * unauthorized constructions
- * strict indentation
- * line length
- """
-
- __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
-
- # configuration section name
- name = 'format'
- # messages
- msgs = MSGS
- # configuration options
- # for available dict keys/values see the optik parser 'add_option' method
- options = (('max-line-length',
- {'default' : 100, 'type' : "int", 'metavar' : '',
- 'help' : 'Maximum number of characters on a single line.'}),
- ('ignore-long-lines',
- {'type': 'regexp', 'metavar': '',
- 'default': r'^\s*(# )??$',
- 'help': ('Regexp for a line that is allowed to be longer than '
- 'the limit.')}),
- ('single-line-if-stmt',
- {'default': False, 'type' : 'yn', 'metavar' : '',
- 'help' : ('Allow the body of an if to be on the same '
- 'line as the test if there is no else.')}),
- ('single-line-class-stmt',
- {'default': False, 'type' : 'yn', 'metavar' : '',
- 'help' : ('Allow the body of a class to be on the same '
- 'line as the declaration if body contains '
- 'single statement.')}),
- ('no-space-check',
- {'default': ','.join(_DEFAULT_NO_SPACE_CHECK_CHOICES),
- 'metavar': ','.join(_NO_SPACE_CHECK_CHOICES),
- 'type': 'multiple_choice',
- 'choices': _NO_SPACE_CHECK_CHOICES,
- 'help': ('List of optional constructs for which whitespace '
- 'checking is disabled. '
- '`'+ _DICT_SEPARATOR + '` is used to allow tabulation '
- 'in dicts, etc.: {1 : 1,\\n222: 2}. '
- '`'+ _TRAILING_COMMA + '` allows a space between comma '
- 'and closing bracket: (a, ). '
- '`'+ _EMPTY_LINE + '` allows space-only lines.')}),
- ('max-module-lines',
- {'default' : 1000, 'type' : 'int', 'metavar' : '',
- 'help': 'Maximum number of lines in a module'}
- ),
- ('indent-string',
- {'default' : ' ', 'type' : "non_empty_string", 'metavar' : '',
- 'help' : 'String used as indentation unit. This is usually '
- '" " (4 spaces) or "\\t" (1 tab).'}),
- ('indent-after-paren',
- {'type': 'int', 'metavar': '', 'default': 4,
- 'help': 'Number of spaces of indent required inside a hanging '
- ' or continued line.'}),
- ('expected-line-ending-format',
- {'type': 'choice', 'metavar': '', 'default': '',
- 'choices': ['', 'LF', 'CRLF'],
- 'help': ('Expected format of line ending, '
- 'e.g. empty (any line ending), LF or CRLF.')}),
- )
-
- def __init__(self, linter=None):
- BaseTokenChecker.__init__(self, linter)
- self._lines = None
- self._visited_lines = None
- self._bracket_stack = [None]
-
- def _pop_token(self):
- self._bracket_stack.pop()
- self._current_line.pop_token()
-
- def _push_token(self, token, idx):
- self._bracket_stack.append(token)
- self._current_line.push_token(token, idx)
-
- def new_line(self, tokens, line_end, line_start):
- """a new line has been encountered, process it if necessary"""
- if _last_token_on_line_is(tokens, line_end, ';'):
- self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end))
-
- line_num = tokens.start_line(line_start)
- line = tokens.line(line_start)
- if tokens.type(line_start) not in _JUNK_TOKENS:
- self._lines[line_num] = line.split('\n')[0]
- self.check_lines(line, line_num)
-
- def process_module(self, module):
- self._keywords_with_parens = set()
- if 'print_function' in module.future_imports:
- self._keywords_with_parens.add('print')
-
- def _check_keyword_parentheses(self, tokens, start):
- """Check that there are not unnecessary parens after a keyword.
-
- Parens are unnecessary if there is exactly one balanced outer pair on a
- line, and it is followed by a colon, and contains no commas (i.e. is not a
- tuple).
-
- Args:
- tokens: list of Tokens; the entire list of Tokens.
- start: int; the position of the keyword in the token list.
- """
- # If the next token is not a paren, we're fine.
- if self._inside_brackets(':') and tokens[start][1] == 'for':
- self._pop_token()
- if tokens[start+1][1] != '(':
- return
-
- found_and_or = False
- depth = 0
- keyword_token = tokens[start][1]
- line_num = tokens[start][2][0]
-
- for i in range(start, len(tokens) - 1):
- token = tokens[i]
-
- # If we hit a newline, then assume any parens were for continuation.
- if token[0] == tokenize.NL:
- return
-
- if token[1] == '(':
- depth += 1
- elif token[1] == ')':
- depth -= 1
- if depth:
- continue
- # ')' can't happen after if (foo), since it would be a syntax error.
- if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or
- tokens[i+1][0] in (tokenize.NEWLINE,
- tokenize.ENDMARKER,
- tokenize.COMMENT)):
- # The empty tuple () is always accepted.
- if i == start + 2:
- return
- if keyword_token == 'not':
- if not found_and_or:
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
- elif keyword_token in ('return', 'yield'):
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
- elif keyword_token not in self._keywords_with_parens:
- if not (tokens[i+1][1] == 'in' and found_and_or):
- self.add_message('superfluous-parens', line=line_num,
- args=keyword_token)
- return
- elif depth == 1:
- # This is a tuple, which is always acceptable.
- if token[1] == ',':
- return
- # 'and' and 'or' are the only boolean operators with lower precedence
- # than 'not', so parens are only required when they are found.
- elif token[1] in ('and', 'or'):
- found_and_or = True
- # A yield inside an expression must always be in parentheses,
- # quit early without error.
- elif token[1] == 'yield':
- return
- # A generator expression always has a 'for' token in it, and
- # the 'for' token is only legal inside parens when it is in a
- # generator expression. The parens are necessary here, so bail
- # without an error.
- elif token[1] == 'for':
- return
-
- def _opening_bracket(self, tokens, i):
- self._push_token(tokens[i][1], i)
- # Special case: ignore slices
- if tokens[i][1] == '[' and tokens[i+1][1] == ':':
- return
-
- if (i > 0 and (tokens[i-1][0] == tokenize.NAME and
- not (keyword.iskeyword(tokens[i-1][1]))
- or tokens[i-1][1] in _CLOSING_BRACKETS)):
- self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
- else:
- self._check_space(tokens, i, (_IGNORE, _MUST_NOT))
-
- def _closing_bracket(self, tokens, i):
- if self._inside_brackets(':'):
- self._pop_token()
- self._pop_token()
- # Special case: ignore slices
- if tokens[i-1][1] == ':' and tokens[i][1] == ']':
- return
- policy_before = _MUST_NOT
- if tokens[i][1] in _CLOSING_BRACKETS and tokens[i-1][1] == ',':
- if _TRAILING_COMMA in self.config.no_space_check:
- policy_before = _IGNORE
-
- self._check_space(tokens, i, (policy_before, _IGNORE))
-
- def _has_valid_type_annotation(self, tokens, i):
- """Extended check of PEP-484 type hint presence"""
- if not self._inside_brackets('('):
- return False
- bracket_level = 0
- for token in tokens[i-1::-1]:
- if token[1] == ':':
- return True
- if token[1] == '(':
- return False
- if token[1] == ']':
- bracket_level += 1
- elif token[1] == '[':
- bracket_level -= 1
- elif token[1] == ',':
- if not bracket_level:
- return False
- elif token[0] not in (tokenize.NAME, tokenize.STRING):
- return False
- return False
-
- def _check_equals_spacing(self, tokens, i):
- """Check the spacing of a single equals sign."""
- if self._has_valid_type_annotation(tokens, i):
- self._check_space(tokens, i, (_MUST, _MUST))
- elif self._inside_brackets('(') or self._inside_brackets('lambda'):
- self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
- else:
- self._check_space(tokens, i, (_MUST, _MUST))
-
- def _open_lambda(self, tokens, i): # pylint:disable=unused-argument
- self._push_token('lambda', i)
-
- def _handle_colon(self, tokens, i):
- # Special case: ignore slices
- if self._inside_brackets('['):
- return
- if (self._inside_brackets('{') and
- _DICT_SEPARATOR in self.config.no_space_check):
- policy = (_IGNORE, _IGNORE)
- else:
- policy = (_MUST_NOT, _MUST)
- self._check_space(tokens, i, policy)
-
- if self._inside_brackets('lambda'):
- self._pop_token()
- elif self._inside_brackets('{'):
- self._push_token(':', i)
-
- def _handle_comma(self, tokens, i):
- # Only require a following whitespace if this is
- # not a hanging comma before a closing bracket.
- if tokens[i+1][1] in _CLOSING_BRACKETS:
- self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
- else:
- self._check_space(tokens, i, (_MUST_NOT, _MUST))
- if self._inside_brackets(':'):
- self._pop_token()
-
- def _check_surrounded_by_space(self, tokens, i):
- """Check that a binary operator is surrounded by exactly one space."""
- self._check_space(tokens, i, (_MUST, _MUST))
-
- def _check_space(self, tokens, i, policies):
- def _policy_string(policy):
- if policy == _MUST:
- return 'Exactly one', 'required'
- return 'No', 'allowed'
-
- def _name_construct(token):
- if token[1] == ',':
- return 'comma'
- if token[1] == ':':
- return ':'
- if token[1] in '()[]{}':
- return 'bracket'
- if token[1] in ('<', '>', '<=', '>=', '!=', '=='):
- return 'comparison'
- if self._inside_brackets('('):
- return 'keyword argument assignment'
- return 'assignment'
-
- good_space = [True, True]
- token = tokens[i]
- pairs = [(tokens[i-1], token), (token, tokens[i+1])]
-
- for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
- if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
- continue
-
- distance = _column_distance(*token_pair)
- if distance is None:
- continue
- good_space[other_idx] = (
- (policy == _MUST and distance == 1) or
- (policy == _MUST_NOT and distance == 0))
-
- warnings = []
- if not any(good_space) and policies[0] == policies[1]:
- warnings.append((policies[0], 'around'))
- else:
- for ok, policy, position in zip(good_space, policies, ('before', 'after')):
- if not ok:
- warnings.append((policy, position))
- for policy, position in warnings:
- construct = _name_construct(token)
- count, state = _policy_string(policy)
- self.add_message('bad-whitespace', line=token[2][0],
- args=(count, state, position, construct,
- _underline_token(token)))
-
- def _inside_brackets(self, left):
- return self._bracket_stack[-1] == left
-
- def _prepare_token_dispatcher(self):
- raw = [
- (_KEYWORD_TOKENS,
- self._check_keyword_parentheses),
-
- (_OPENING_BRACKETS, self._opening_bracket),
-
- (_CLOSING_BRACKETS, self._closing_bracket),
-
- (['='], self._check_equals_spacing),
-
- (_SPACED_OPERATORS, self._check_surrounded_by_space),
-
- ([','], self._handle_comma),
-
- ([':'], self._handle_colon),
-
- (['lambda'], self._open_lambda),
-
- ]
-
- dispatch = {}
- for tokens, handler in raw:
- for token in tokens:
- dispatch[token] = handler
- return dispatch
-
- def process_tokens(self, tokens):
- """process tokens and search for :
-
- _ non strict indentation (i.e. not always using the parameter as
- indent unit)
- _ too long lines (i.e. longer than )
- _ optionally bad construct (if given, bad_construct must be a compiled
- regular expression).
- """
- self._bracket_stack = [None]
- indents = [0]
- check_equal = False
- line_num = 0
- self._lines = {}
- self._visited_lines = {}
- token_handlers = self._prepare_token_dispatcher()
- self._last_line_ending = None
- last_blank_line_num = 0
-
- self._current_line = ContinuedLineState(tokens, self.config)
- for idx, (tok_type, token, start, _, line) in enumerate(tokens):
- if start[0] != line_num:
- line_num = start[0]
- # A tokenizer oddity: if an indented line contains a multi-line
- # docstring, the line member of the INDENT token does not contain
- # the full line; therefore we check the next token on the line.
- if tok_type == tokenize.INDENT:
- self.new_line(TokenWrapper(tokens), idx-1, idx+1)
- else:
- self.new_line(TokenWrapper(tokens), idx-1, idx)
-
- if tok_type == tokenize.NEWLINE:
- # a program statement, or ENDMARKER, will eventually follow,
- # after some (possibly empty) run of tokens of the form
- # (NL | COMMENT)* (INDENT | DEDENT+)?
- # If an INDENT appears, setting check_equal is wrong, and will
- # be undone when we see the INDENT.
- check_equal = True
- self._process_retained_warnings(TokenWrapper(tokens), idx)
- self._current_line.next_logical_line()
- self._check_line_ending(token, line_num)
- elif tok_type == tokenize.INDENT:
- check_equal = False
- self.check_indent_level(token, indents[-1]+1, line_num)
- indents.append(indents[-1]+1)
- elif tok_type == tokenize.DEDENT:
- # there's nothing we need to check here! what's important is
- # that when the run of DEDENTs ends, the indentation of the
- # program statement (or ENDMARKER) that triggered the run is
- # equal to what's left at the top of the indents stack
- check_equal = True
- if len(indents) > 1:
- del indents[-1]
- elif tok_type == tokenize.NL:
- if not line.strip('\r\n'):
- last_blank_line_num = line_num
- self._check_continued_indentation(TokenWrapper(tokens), idx+1)
- self._current_line.next_physical_line()
- elif tok_type != tokenize.COMMENT:
- self._current_line.handle_line_start(idx)
- # This is the first concrete token following a NEWLINE, so it
- # must be the first token of the next program statement, or an
- # ENDMARKER; the "line" argument exposes the leading whitespace
- # for this statement; in the case of ENDMARKER, line is an empty
- # string, so will properly match the empty string with which the
- # "indents" stack was seeded
- if check_equal:
- check_equal = False
- self.check_indent_level(line, indents[-1], line_num)
-
- if tok_type == tokenize.NUMBER and token.endswith('l'):
- self.add_message('lowercase-l-suffix', line=line_num)
-
- try:
- handler = token_handlers[token]
- except KeyError:
- pass
- else:
- handler(tokens, idx)
-
- line_num -= 1 # to be ok with "wc -l"
- if line_num > self.config.max_module_lines:
- # Get the line where the too-many-lines (or its message id)
- # was disabled or default to 1.
- symbol = self.linter.msgs_store.check_message_id('too-many-lines')
- names = (symbol.msgid, 'too-many-lines')
- line = next(filter(None,
- map(self.linter._pragma_lineno.get, names)), 1)
- self.add_message('too-many-lines',
- args=(line_num, self.config.max_module_lines),
- line=line)
-
- # See if there are any trailing lines. Do not complain about empty
- # files like __init__.py markers.
- if line_num == last_blank_line_num and line_num > 0:
- self.add_message('trailing-newlines', line=line_num)
-
- def _check_line_ending(self, line_ending, line_num):
- # check if line endings are mixed
- if self._last_line_ending is not None:
- if line_ending != self._last_line_ending:
- self.add_message('mixed-line-endings', line=line_num)
-
- self._last_line_ending = line_ending
-
- # check if line ending is as expected
- expected = self.config.expected_line_ending_format
- if expected:
- # reduce multiple \n\n\n\n to one \n
- line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
- line_ending = 'LF' if line_ending == '\n' else 'CRLF'
- if line_ending != expected:
- self.add_message('unexpected-line-ending-format', args=(line_ending, expected),
- line=line_num)
-
- def _process_retained_warnings(self, tokens, current_pos):
- single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
-
- for indent_pos, state, offsets in self._current_line.retained_warnings:
- block_type = offsets[tokens.start_col(indent_pos)]
- hints = dict((k, v) for k, v in six.iteritems(offsets)
- if v != block_type)
- if single_line_block_stmt and block_type == WITH_BODY:
- self._add_continuation_message(state, hints, tokens, indent_pos)
- elif not single_line_block_stmt and block_type == SINGLE_LINE:
- self._add_continuation_message(state, hints, tokens, indent_pos)
-
- def _check_continued_indentation(self, tokens, next_idx):
- def same_token_around_nl(token_type):
- return (tokens.type(next_idx) == token_type and
- tokens.type(next_idx-2) == token_type)
-
- # Do not issue any warnings if the next line is empty.
- if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
- return
-
- state, valid_offsets = self._current_line.get_valid_offsets(next_idx)
- # Special handling for hanging comments and strings. If the last line ended
- # with a comment (string) and the new line contains only a comment, the line
- # may also be indented to the start of the previous token.
- if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(tokenize.STRING):
- valid_offsets[tokens.start_col(next_idx-2)] = True
-
- # We can only decide if the indentation of a continued line before opening
- # a new block is valid once we know of the body of the block is on the
- # same line as the block opener. Since the token processing is single-pass,
- # emitting those warnings is delayed until the block opener is processed.
- if (state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
- and tokens.start_col(next_idx) in valid_offsets):
- self._current_line.add_block_warning(next_idx, state, valid_offsets)
- elif tokens.start_col(next_idx) not in valid_offsets:
-
- self._add_continuation_message(state, valid_offsets, tokens, next_idx)
-
- def _add_continuation_message(self, state, offsets, tokens, position):
- readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
- hint_line, delta_message = _get_indent_hint_line(offsets, tokens.start_col(position))
- self.add_message(
- 'bad-continuation',
- line=tokens.start_line(position),
- args=(readable_type, readable_position, delta_message,
- tokens.line(position), hint_line))
-
- @check_messages('multiple-statements')
- def visit_default(self, node):
- """check the node line number and check it if not yet done"""
- if not node.is_statement:
- return
- if not node.root().pure_python:
- return # XXX block visit of child nodes
- prev_sibl = node.previous_sibling()
- if prev_sibl is not None:
- prev_line = prev_sibl.fromlineno
- else:
- # The line on which a finally: occurs in a try/finally
- # is not directly represented in the AST. We infer it
- # by taking the last line of the body and adding 1, which
- # should be the line of finally:
- if (isinstance(node.parent, nodes.TryFinally)
- and node in node.parent.finalbody):
- prev_line = node.parent.body[0].tolineno + 1
- else:
- prev_line = node.parent.statement().fromlineno
- line = node.fromlineno
- assert line, node
- if prev_line == line and self._visited_lines.get(line) != 2:
- self._check_multi_statement_line(node, line)
- return
- if line in self._visited_lines:
- return
- try:
- tolineno = node.blockstart_tolineno
- except AttributeError:
- tolineno = node.tolineno
- assert tolineno, node
- lines = []
- for line in range(line, tolineno + 1):
- self._visited_lines[line] = 1
- try:
- lines.append(self._lines[line].rstrip())
- except KeyError:
- lines.append('')
-
- def _check_multi_statement_line(self, node, line):
- """Check for lines containing multiple statements."""
- # Do not warn about multiple nested context managers
- # in with statements.
- if isinstance(node, nodes.With):
- return
- # For try... except... finally..., the two nodes
- # appear to be on the same line due to how the AST is built.
- if (isinstance(node, nodes.TryExcept) and
- isinstance(node.parent, nodes.TryFinally)):
- return
- if (isinstance(node.parent, nodes.If) and not node.parent.orelse
- and self.config.single_line_if_stmt):
- return
- if (isinstance(node.parent, nodes.ClassDef) and len(node.parent.body) == 1
- and self.config.single_line_class_stmt):
- return
- self.add_message('multiple-statements', node=node)
- self._visited_lines[line] = 2
-
- def check_lines(self, lines, i):
- """check lines have less than a maximum number of characters
- """
- max_chars = self.config.max_line_length
- ignore_long_line = self.config.ignore_long_lines
-
- def check_line(line, i):
- if not line.endswith('\n'):
- self.add_message('missing-final-newline', line=i)
- else:
- # exclude \f (formfeed) from the rstrip
- stripped_line = line.rstrip('\t\n\r\v ')
- if not stripped_line and _EMPTY_LINE in self.config.no_space_check:
- # allow empty lines
- pass
- elif line[len(stripped_line):] not in ('\n', '\r\n'):
- self.add_message('trailing-whitespace', line=i)
- # Don't count excess whitespace in the line length.
- line = stripped_line
- mobj = OPTION_RGX.search(line)
- if mobj and mobj.group(1).split('=', 1)[0].strip() == 'disable':
- line = line.split('#')[0].rstrip()
-
- if len(line) > max_chars and not ignore_long_line.search(line):
- self.add_message('line-too-long', line=i, args=(len(line), max_chars))
- return i + 1
-
- unsplit_ends = {
- u'\v',
- u'\x0b',
- u'\f',
- u'\x0c',
- u'\x1c',
- u'\x1d',
- u'\x1e',
- u'\x85',
- u'\u2028',
- u'\u2029'
- }
- unsplit = []
- for line in lines.splitlines(True):
- if line[-1] in unsplit_ends:
- unsplit.append(line)
- continue
-
- if unsplit:
- unsplit.append(line)
- line = ''.join(unsplit)
- unsplit = []
-
- i = check_line(line, i)
-
- if unsplit:
- check_line(''.join(unsplit), i)
-
- def check_indent_level(self, string, expected, line_num):
- """return the indent level of the string
- """
- indent = self.config.indent_string
- if indent == '\\t': # \t is not interpreted in the configuration file
- indent = '\t'
- level = 0
- unit_size = len(indent)
- while string[:unit_size] == indent:
- string = string[unit_size:]
- level += 1
- suppl = ''
- while string and string[0] in ' \t':
- if string[0] != indent[0]:
- if string[0] == '\t':
- args = ('tab', 'space')
- else:
- args = ('space', 'tab')
- self.add_message('mixed-indentation', args=args, line=line_num)
- return level
- suppl += string[0]
- string = string[1:]
- if level != expected or suppl:
- i_type = 'spaces'
- if indent[0] == '\t':
- i_type = 'tabs'
- self.add_message('bad-indentation', line=line_num,
- args=(level * unit_size + len(suppl), i_type,
- expected * unit_size))
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(FormatChecker(linter))
diff --git a/pymode/libs/pylint/checkers/imports.py b/pymode/libs/pylint/checkers/imports.py
deleted file mode 100644
index 4cae39da..00000000
--- a/pymode/libs/pylint/checkers/imports.py
+++ /dev/null
@@ -1,768 +0,0 @@
-# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2012-2014 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2015 Noam Yorav-Raphael
-# Copyright (c) 2015 Cezar
-# Copyright (c) 2015 James Morgensen
-# Copyright (c) 2016 Moises Lopez - https://www.vauxoo.com/
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""imports checkers for Python code"""
-
-import collections
-from distutils import sysconfig
-import os
-import sys
-import copy
-
-import six
-
-import astroid
-from astroid import are_exclusive
-from astroid.modutils import (get_module_part, is_standard_module)
-import isort
-
-from pylint.interfaces import IAstroidChecker
-from pylint.utils import get_global_option
-from pylint.exceptions import EmptyReportError
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (
- check_messages,
- node_ignores_exception,
- is_from_fallback_block
-)
-from pylint.graph import get_cycles, DotBackend
-from pylint.reporters.ureports.nodes import VerbatimText, Paragraph
-
-
-def _qualified_names(modname):
- """Split the names of the given module into subparts
-
- For example,
- _qualified_names('pylint.checkers.ImportsChecker')
- returns
- ['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
- """
- names = modname.split('.')
- return ['.'.join(names[0:i+1]) for i in range(len(names))]
-
-
-def _get_import_name(importnode, modname):
- """Get a prepared module name from the given import node
-
- In the case of relative imports, this will return the
- absolute qualified module name, which might be useful
- for debugging. Otherwise, the initial module name
- is returned unchanged.
- """
- if isinstance(importnode, astroid.ImportFrom):
- if importnode.level:
- root = importnode.root()
- if isinstance(root, astroid.Module):
- modname = root.relative_to_absolute_name(
- modname, level=importnode.level)
- return modname
-
-
-def _get_first_import(node, context, name, base, level, alias):
- """return the node where [base.] is imported or None if not found
- """
- fullname = '%s.%s' % (base, name) if base else name
-
- first = None
- found = False
- for first in context.body:
- if first is node:
- continue
- if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
- continue
- if isinstance(first, astroid.Import):
- if any(fullname == iname[0] for iname in first.names):
- found = True
- break
- elif isinstance(first, astroid.ImportFrom):
- if level == first.level:
- for imported_name, imported_alias in first.names:
- if fullname == '%s.%s' % (first.modname, imported_name):
- found = True
- break
- if name != '*' and name == imported_name and not (alias or imported_alias):
- found = True
- break
- if found:
- break
- if found and not are_exclusive(first, node):
- return first
-
-
-def _ignore_import_failure(node, modname, ignored_modules):
- for submodule in _qualified_names(modname):
- if submodule in ignored_modules:
- return True
-
- return node_ignores_exception(node, ImportError)
-
-# utilities to represents import dependencies as tree and dot graph ###########
-
-def _make_tree_defs(mod_files_list):
- """get a list of 2-uple (module, list_of_files_which_import_this_module),
- it will return a dictionary to represent this as a tree
- """
- tree_defs = {}
- for mod, files in mod_files_list:
- node = (tree_defs, ())
- for prefix in mod.split('.'):
- node = node[0].setdefault(prefix, [{}, []])
- node[1] += files
- return tree_defs
-
-
-def _repr_tree_defs(data, indent_str=None):
- """return a string which represents imports as a tree"""
- lines = []
- nodes = data.items()
- for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
- if not files:
- files = ''
- else:
- files = '(%s)' % ','.join(sorted(files))
- if indent_str is None:
- lines.append('%s %s' % (mod, files))
- sub_indent_str = ' '
- else:
- lines.append(r'%s\-%s %s' % (indent_str, mod, files))
- if i == len(nodes)-1:
- sub_indent_str = '%s ' % indent_str
- else:
- sub_indent_str = '%s| ' % indent_str
- if sub:
- lines.append(_repr_tree_defs(sub, sub_indent_str))
- return '\n'.join(lines)
-
-
-def _dependencies_graph(filename, dep_info):
- """write dependencies as a dot (graphviz) file
- """
- done = {}
- printer = DotBackend(filename[:-4], rankdir='LR')
- printer.emit('URL="." node[shape="box"]')
- for modname, dependencies in sorted(six.iteritems(dep_info)):
- done[modname] = 1
- printer.emit_node(modname)
- for depmodname in dependencies:
- if depmodname not in done:
- done[depmodname] = 1
- printer.emit_node(depmodname)
- for depmodname, dependencies in sorted(six.iteritems(dep_info)):
- for modname in dependencies:
- printer.emit_edge(modname, depmodname)
- printer.generate(filename)
-
-
-def _make_graph(filename, dep_info, sect, gtype):
- """generate a dependencies graph and add some information about it in the
- report's section
- """
- _dependencies_graph(filename, dep_info)
- sect.append(Paragraph('%simports graph has been written to %s'
- % (gtype, filename)))
-
-
-# the import checker itself ###################################################
-
-MSGS = {
- 'E0401': ('Unable to import %s',
- 'import-error',
- 'Used when pylint has been unable to import a module.',
- {'old_names': [('F0401', 'import-error')]}),
- 'E0402': ('Attempted relative import beyond top-level package',
- 'relative-beyond-top-level',
- 'Used when a relative import tries to access too many levels '
- 'in the current package.'),
- 'R0401': ('Cyclic import (%s)',
- 'cyclic-import',
- 'Used when a cyclic import between two or more modules is \
- detected.'),
-
- 'W0401': ('Wildcard import %s',
- 'wildcard-import',
- 'Used when `from module import *` is detected.'),
- 'W0402': ('Uses of a deprecated module %r',
- 'deprecated-module',
- 'Used a module marked as deprecated is imported.'),
- 'W0403': ('Relative import %r, should be %r',
- 'relative-import',
- 'Used when an import relative to the package directory is '
- 'detected.',
- {'maxversion': (3, 0)}),
- 'W0404': ('Reimport %r (imported line %s)',
- 'reimported',
- 'Used when a module is reimported multiple times.'),
- 'W0406': ('Module import itself',
- 'import-self',
- 'Used when a module is importing itself.'),
-
- 'W0410': ('__future__ import is not the first non docstring statement',
- 'misplaced-future',
- 'Python 2.5 and greater require __future__ import to be the \
- first non docstring statement in the module.'),
-
- 'C0410': ('Multiple imports on one line (%s)',
- 'multiple-imports',
- 'Used when import statement importing multiple modules is '
- 'detected.'),
- 'C0411': ('%s should be placed before %s',
- 'wrong-import-order',
- 'Used when PEP8 import order is not respected (standard imports '
- 'first, then third-party libraries, then local imports)'),
- 'C0412': ('Imports from package %s are not grouped',
- 'ungrouped-imports',
- 'Used when imports are not grouped by packages'),
- 'C0413': ('Import "%s" should be placed at the top of the '
- 'module',
- 'wrong-import-position',
- 'Used when code and imports are mixed'),
- }
-
-
-DEFAULT_STANDARD_LIBRARY = ()
-DEFAULT_KNOWN_THIRD_PARTY = ('enchant',)
-
-
-class ImportsChecker(BaseChecker):
- """checks for
- * external modules dependencies
- * relative / wildcard imports
- * cyclic imports
- * uses of deprecated modules
- """
-
- __implements__ = IAstroidChecker
-
- name = 'imports'
- msgs = MSGS
- priority = -2
-
- if six.PY2:
- deprecated_modules = ('regsub', 'TERMIOS', 'Bastion', 'rexec')
- elif sys.version_info < (3, 5):
- deprecated_modules = ('optparse', )
- else:
- deprecated_modules = ('optparse', 'tkinter.tix')
- options = (('deprecated-modules',
- {'default' : deprecated_modules,
- 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'Deprecated modules which should not be used,'
- ' separated by a comma'}
- ),
- ('import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'Create a graph of every (i.e. internal and'
- ' external) dependencies in the given file'
- ' (report RP0402 must not be disabled)'}
- ),
- ('ext-import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'Create a graph of external dependencies in the'
- ' given file (report RP0402 must not be disabled)'}
- ),
- ('int-import-graph',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'Create a graph of internal dependencies in the'
- ' given file (report RP0402 must not be disabled)'}
- ),
- ('known-standard-library',
- {'default': DEFAULT_STANDARD_LIBRARY,
- 'type': 'csv',
- 'metavar': '',
- 'help': 'Force import order to recognize a module as part of'
- ' the standard compatibility libraries.'}
- ),
- ('known-third-party',
- {'default': DEFAULT_KNOWN_THIRD_PARTY,
- 'type': 'csv',
- 'metavar': '',
- 'help': 'Force import order to recognize a module as part of'
- ' a third party library.'}
- ),
- ('analyse-fallback-blocks',
- {'default': False,
- 'type': 'yn',
- 'metavar': '',
- 'help': 'Analyse import fallback blocks. This can be used to '
- 'support both Python 2 and 3 compatible code, which means that '
- 'the block might have code that exists only in one or another '
- 'interpreter, leading to false positives when analysed.'},
- ),
- ('allow-wildcard-with-all',
- {'default': False,
- 'type': 'yn',
- 'metavar': '',
- 'help': 'Allow wildcard imports from modules that define __all__.'}),
- )
-
- def __init__(self, linter=None):
- BaseChecker.__init__(self, linter)
- self.stats = None
- self.import_graph = None
- self._imports_stack = []
- self._first_non_import_node = None
- self.__int_dep_info = self.__ext_dep_info = None
- self.reports = (('RP0401', 'External dependencies',
- self._report_external_dependencies),
- ('RP0402', 'Modules dependencies graph',
- self._report_dependencies_graph),
- )
-
- self._site_packages = self._compute_site_packages()
-
- @staticmethod
- def _compute_site_packages():
- def _normalized_path(path):
- return os.path.normcase(os.path.abspath(path))
-
- paths = set()
- real_prefix = getattr(sys, 'real_prefix', None)
- for prefix in filter(None, (real_prefix, sys.prefix)):
- path = sysconfig.get_python_lib(prefix=prefix)
- path = _normalized_path(path)
- paths.add(path)
-
- # Handle Debian's derivatives /usr/local.
- if os.path.isfile("/etc/debian_version"):
- for prefix in filter(None, (real_prefix, sys.prefix)):
- libpython = os.path.join(prefix, "local", "lib",
- "python" + sysconfig.get_python_version(),
- "dist-packages")
- paths.add(libpython)
- return paths
-
- def open(self):
- """called before visiting project (i.e set of modules)"""
- self.linter.add_stats(dependencies={})
- self.linter.add_stats(cycles=[])
- self.stats = self.linter.stats
- self.import_graph = collections.defaultdict(set)
- self._excluded_edges = collections.defaultdict(set)
- self._ignored_modules = get_global_option(
- self, 'ignored-modules', default=[])
-
- def _import_graph_without_ignored_edges(self):
- filtered_graph = copy.deepcopy(self.import_graph)
- for node in filtered_graph:
- filtered_graph[node].difference_update(self._excluded_edges[node])
- return filtered_graph
-
- def close(self):
- """called before visiting project (i.e set of modules)"""
- if self.linter.is_message_enabled('cyclic-import'):
- graph = self._import_graph_without_ignored_edges()
- vertices = list(graph)
- for cycle in get_cycles(graph, vertices=vertices):
- self.add_message('cyclic-import', args=' -> '.join(cycle))
-
- @check_messages('wrong-import-position', 'multiple-imports',
- 'relative-import', 'reimported', 'deprecated-module')
- def visit_import(self, node):
- """triggered when an import statement is seen"""
- self._check_reimport(node)
-
- modnode = node.root()
- names = [name for name, _ in node.names]
- if len(names) >= 2:
- self.add_message('multiple-imports', args=', '.join(names), node=node)
-
- for name in names:
- self._check_deprecated_module(node, name)
- imported_module = self._get_imported_module(node, name)
- if isinstance(node.parent, astroid.Module):
- # Allow imports nested
- self._check_position(node)
- if isinstance(node.scope(), astroid.Module):
- self._record_import(node, imported_module)
-
- if imported_module is None:
- continue
-
- self._check_relative_import(modnode, node, imported_module, name)
- self._add_imported_module(node, imported_module.name)
-
- @check_messages(*(MSGS.keys()))
- def visit_importfrom(self, node):
- """triggered when a from statement is seen"""
- basename = node.modname
- imported_module = self._get_imported_module(node, basename)
-
- self._check_misplaced_future(node)
- self._check_deprecated_module(node, basename)
- self._check_wildcard_imports(node, imported_module)
- self._check_same_line_imports(node)
- self._check_reimport(node, basename=basename, level=node.level)
-
- if isinstance(node.parent, astroid.Module):
- # Allow imports nested
- self._check_position(node)
- if isinstance(node.scope(), astroid.Module):
- self._record_import(node, imported_module)
- if imported_module is None:
- return
- modnode = node.root()
- self._check_relative_import(modnode, node, imported_module, basename)
-
- for name, _ in node.names:
- if name != '*':
- self._add_imported_module(node, '%s.%s' % (imported_module.name, name))
-
- @check_messages('wrong-import-order', 'ungrouped-imports',
- 'wrong-import-position')
- def leave_module(self, node):
- # Check imports are grouped by category (standard, 3rd party, local)
- std_imports, ext_imports, loc_imports = self._check_imports_order(node)
-
- # Check imports are grouped by package within a given category
- met = set()
- current_package = None
- for import_node, import_name in std_imports + ext_imports + loc_imports:
- package, _, _ = import_name.partition('.')
- if current_package and current_package != package and package in met:
- self.add_message('ungrouped-imports', node=import_node,
- args=package)
- current_package = package
- met.add(package)
-
- self._imports_stack = []
- self._first_non_import_node = None
-
- def compute_first_non_import_node(self, node):
- # if the node does not contain an import instruction, and if it is the
- # first node of the module, keep a track of it (all the import positions
- # of the module will be compared to the position of this first
- # instruction)
- if self._first_non_import_node:
- return
- if not isinstance(node.parent, astroid.Module):
- return
- nested_allowed = [astroid.TryExcept, astroid.TryFinally]
- is_nested_allowed = [
- allowed for allowed in nested_allowed if isinstance(node, allowed)]
- if is_nested_allowed and \
- any(node.nodes_of_class((astroid.Import, astroid.ImportFrom))):
- return
- if isinstance(node, astroid.Assign):
- # Add compatibility for module level dunder names
- # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
- valid_targets = [
- isinstance(target, astroid.AssignName) and
- target.name.startswith('__') and target.name.endswith('__')
- for target in node.targets]
- if all(valid_targets):
- return
- self._first_non_import_node = node
-
- visit_tryfinally = visit_tryexcept = visit_assignattr = visit_assign = \
- visit_ifexp = visit_comprehension = visit_expr = visit_if = \
- compute_first_non_import_node
-
- def visit_functiondef(self, node):
- # If it is the first non import instruction of the module, record it.
- if self._first_non_import_node:
- return
-
- # Check if the node belongs to an `If` or a `Try` block. If they
- # contain imports, skip recording this node.
- if not isinstance(node.parent.scope(), astroid.Module):
- return
-
- root = node
- while not isinstance(root.parent, astroid.Module):
- root = root.parent
-
- if isinstance(root, (astroid.If, astroid.TryFinally, astroid.TryExcept)):
- if any(root.nodes_of_class((astroid.Import, astroid.ImportFrom))):
- return
-
- self._first_non_import_node = node
-
- visit_classdef = visit_for = visit_while = visit_functiondef
-
- def _check_misplaced_future(self, node):
- basename = node.modname
- if basename == '__future__':
- # check if this is the first non-docstring statement in the module
- prev = node.previous_sibling()
- if prev:
- # consecutive future statements are possible
- if not (isinstance(prev, astroid.ImportFrom)
- and prev.modname == '__future__'):
- self.add_message('misplaced-future', node=node)
- return
-
- def _check_same_line_imports(self, node):
- # Detect duplicate imports on the same line.
- names = (name for name, _ in node.names)
- counter = collections.Counter(names)
- for name, count in counter.items():
- if count > 1:
- self.add_message('reimported', node=node,
- args=(name, node.fromlineno))
-
- def _check_position(self, node):
- """Check `node` import or importfrom node position is correct
-
- Send a message if `node` comes before another instruction
- """
- # if a first non-import instruction has already been encountered,
- # it means the import comes after it and therefore is not well placed
- if self._first_non_import_node:
- self.add_message('wrong-import-position', node=node,
- args=node.as_string())
-
- def _record_import(self, node, importedmodnode):
- """Record the package `node` imports from"""
- importedname = importedmodnode.name if importedmodnode else None
- if not importedname:
- if isinstance(node, astroid.ImportFrom):
- importedname = node.modname
- else:
- importedname = node.names[0][0].split('.')[0]
- if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
- # We need the impotedname with first point to detect local package
- # Example of node:
- # 'from .my_package1 import MyClass1'
- # the output should be '.my_package1' instead of 'my_package1'
- # Example of node:
- # 'from . import my_package2'
- # the output should be '.my_package2' instead of '{pyfile}'
- importedname = '.' + importedname
- self._imports_stack.append((node, importedname))
-
- @staticmethod
- def _is_fallback_import(node, imports):
- imports = [import_node for (import_node, _) in imports]
- return any(astroid.are_exclusive(import_node, node)
- for import_node in imports)
-
- def _check_imports_order(self, _module_node):
- """Checks imports of module `node` are grouped by category
-
- Imports must follow this order: standard, 3rd party, local
- """
- extern_imports = []
- local_imports = []
- std_imports = []
- extern_not_nested = []
- local_not_nested = []
- isort_obj = isort.SortImports(
- file_contents='', known_third_party=self.config.known_third_party,
- known_standard_library=self.config.known_standard_library,
- )
- for node, modname in self._imports_stack:
- if modname.startswith('.'):
- package = '.' + modname.split('.')[1]
- else:
- package = modname.split('.')[0]
- nested = not isinstance(node.parent, astroid.Module)
- import_category = isort_obj.place_module(package)
- if import_category in ('FUTURE', 'STDLIB'):
- std_imports.append((node, package))
- wrong_import = extern_not_nested or local_not_nested
- if self._is_fallback_import(node, wrong_import):
- continue
- if wrong_import and not nested:
- self.add_message('wrong-import-order', node=node,
- args=('standard import "%s"' % node.as_string(),
- '"%s"' % wrong_import[0][0].as_string()))
- elif import_category in ('FIRSTPARTY', 'THIRDPARTY'):
- extern_imports.append((node, package))
- if not nested:
- extern_not_nested.append((node, package))
- wrong_import = local_not_nested
- if wrong_import and not nested:
- self.add_message('wrong-import-order', node=node,
- args=('external import "%s"' % node.as_string(),
- '"%s"' % wrong_import[0][0].as_string()))
- elif import_category == 'LOCALFOLDER':
- local_imports.append((node, package))
- if not nested:
- local_not_nested.append((node, package))
- return std_imports, extern_imports, local_imports
-
- def _get_imported_module(self, importnode, modname):
- try:
- return importnode.do_import_module(modname)
- except astroid.TooManyLevelsError:
- if _ignore_import_failure(importnode, modname, self._ignored_modules):
- return None
-
- self.add_message('relative-beyond-top-level', node=importnode)
-
- except astroid.AstroidBuildingException:
- if _ignore_import_failure(importnode, modname, self._ignored_modules):
- return None
- if not self.config.analyse_fallback_blocks and is_from_fallback_block(importnode):
- return None
-
- dotted_modname = _get_import_name(importnode, modname)
- self.add_message('import-error', args=repr(dotted_modname),
- node=importnode)
-
- def _check_relative_import(self, modnode, importnode, importedmodnode,
- importedasname):
- """check relative import. node is either an Import or From node, modname
- the imported module name.
- """
- if not self.linter.is_message_enabled('relative-import'):
- return
- if importedmodnode.file is None:
- return False # built-in module
- if modnode is importedmodnode:
- return False # module importing itself
- if modnode.absolute_import_activated() or getattr(importnode, 'level', None):
- return False
- if importedmodnode.name != importedasname:
- # this must be a relative import...
- self.add_message('relative-import',
- args=(importedasname, importedmodnode.name),
- node=importnode)
-
- def _add_imported_module(self, node, importedmodname):
- """notify an imported module, used to analyze dependencies"""
- module_file = node.root().file
- context_name = node.root().name
- base = os.path.splitext(os.path.basename(module_file))[0]
-
- # Determine if we have a `from .something import` in a package's
- # __init__. This means the module will never be able to import
- # itself using this condition (the level will be bigger or
- # if the same module is named as the package, it will be different
- # anyway).
- if isinstance(node, astroid.ImportFrom):
- if node.level and node.level > 0 and base == '__init__':
- return
-
- try:
- importedmodname = get_module_part(importedmodname,
- module_file)
- except ImportError:
- pass
-
- if context_name == importedmodname:
- self.add_message('import-self', node=node)
- elif not is_standard_module(importedmodname):
- # handle dependencies
- importedmodnames = self.stats['dependencies'].setdefault(
- importedmodname, set())
- if context_name not in importedmodnames:
- importedmodnames.add(context_name)
-
- # update import graph
- self.import_graph[context_name].add(importedmodname)
- if not self.linter.is_message_enabled('cyclic-import'):
- self._excluded_edges[context_name].add(importedmodname)
-
- def _check_deprecated_module(self, node, mod_path):
- """check if the module is deprecated"""
- for mod_name in self.config.deprecated_modules:
- if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
- self.add_message('deprecated-module', node=node, args=mod_path)
-
- def _check_reimport(self, node, basename=None, level=None):
- """check if the import is necessary (i.e. not already done)"""
- if not self.linter.is_message_enabled('reimported'):
- return
-
- frame = node.frame()
- root = node.root()
- contexts = [(frame, level)]
- if root is not frame:
- contexts.append((root, None))
-
- for known_context, known_level in contexts:
- for name, alias in node.names:
- first = _get_first_import(
- node, known_context,
- name, basename,
- known_level, alias)
- if first is not None:
- self.add_message('reimported', node=node,
- args=(name, first.fromlineno))
-
- def _report_external_dependencies(self, sect, _, _dummy):
- """return a verbatim layout for displaying dependencies"""
- dep_info = _make_tree_defs(six.iteritems(self._external_dependencies_info()))
- if not dep_info:
- raise EmptyReportError()
- tree_str = _repr_tree_defs(dep_info)
- sect.append(VerbatimText(tree_str))
-
- def _report_dependencies_graph(self, sect, _, _dummy):
- """write dependencies as a dot (graphviz) file"""
- dep_info = self.stats['dependencies']
- if not dep_info or not (self.config.import_graph
- or self.config.ext_import_graph
- or self.config.int_import_graph):
- raise EmptyReportError()
- filename = self.config.import_graph
- if filename:
- _make_graph(filename, dep_info, sect, '')
- filename = self.config.ext_import_graph
- if filename:
- _make_graph(filename, self._external_dependencies_info(),
- sect, 'external ')
- filename = self.config.int_import_graph
- if filename:
- _make_graph(filename, self._internal_dependencies_info(),
- sect, 'internal ')
-
- def _external_dependencies_info(self):
- """return cached external dependencies information or build and
- cache them
- """
- if self.__ext_dep_info is None:
- package = self.linter.current_name
- self.__ext_dep_info = result = {}
- for importee, importers in six.iteritems(self.stats['dependencies']):
- if not importee.startswith(package):
- result[importee] = importers
- return self.__ext_dep_info
-
- def _internal_dependencies_info(self):
- """return cached internal dependencies information or build and
- cache them
- """
- if self.__int_dep_info is None:
- package = self.linter.current_name
- self.__int_dep_info = result = {}
- for importee, importers in six.iteritems(self.stats['dependencies']):
- if importee.startswith(package):
- result[importee] = importers
- return self.__int_dep_info
-
- def _check_wildcard_imports(self, node, imported_module):
- wildcard_import_is_allowed = (
- self._wildcard_import_is_allowed(imported_module)
- )
- for name, _ in node.names:
- if name == '*' and not wildcard_import_is_allowed:
- self.add_message('wildcard-import', args=node.modname, node=node)
-
- def _wildcard_import_is_allowed(self, imported_module):
- return (self.config.allow_wildcard_with_all
- and imported_module is not None
- and '__all__' in imported_module.locals)
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(ImportsChecker(linter))
diff --git a/pymode/libs/pylint/checkers/logging.py b/pymode/libs/pylint/checkers/logging.py
deleted file mode 100644
index d9c1fd78..00000000
--- a/pymode/libs/pylint/checkers/logging.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2016 Claudiu Popa
-# Copyright (c) 2016 Ashley Whetter
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""checker for use of Python logging
-"""
-import string
-
-import six
-
-import astroid
-
-from pylint import checkers
-from pylint import interfaces
-from pylint.checkers import utils
-from pylint.checkers.utils import check_messages
-
-
-
-MSGS = {
- 'W1201': ('Specify string format arguments as logging function parameters',
- 'logging-not-lazy',
- 'Used when a logging statement has a call form of '
- '"logging.(format_string % (format_args...))". '
- 'Such calls should leave string interpolation to the logging '
- 'method itself and be written '
- '"logging.(format_string, format_args...)" '
- 'so that the program may avoid incurring the cost of the '
- 'interpolation in those cases in which no message will be '
- 'logged. For more, see '
- 'http://www.python.org/dev/peps/pep-0282/.'),
- 'W1202': ('Use % formatting in logging functions and pass the % '
- 'parameters as arguments',
- 'logging-format-interpolation',
- 'Used when a logging statement has a call form of '
- '"logging.(format_string.format(format_args...))"'
- '. Such calls should use % formatting instead, but leave '
- 'interpolation to the logging function by passing the parameters '
- 'as arguments.'),
- 'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
- 'logging-unsupported-format',
- 'Used when an unsupported format character is used in a logging\
- statement format string.'),
- 'E1201': ('Logging format string ends in middle of conversion specifier',
- 'logging-format-truncated',
- 'Used when a logging statement format string terminates before\
- the end of a conversion specifier.'),
- 'E1205': ('Too many arguments for logging format string',
- 'logging-too-many-args',
- 'Used when a logging format string is given too many arguments.'),
- 'E1206': ('Not enough arguments for logging format string',
- 'logging-too-few-args',
- 'Used when a logging format string is given too few arguments.'),
- }
-
-
-CHECKED_CONVENIENCE_FUNCTIONS = {
- 'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn', 'warning'
-}
-
-
-def is_method_call(func, types=(), methods=()):
- """Determines if a BoundMethod node represents a method call.
-
- Args:
- func (astroid.BoundMethod): The BoundMethod AST node to check.
- types (Optional[String]): Optional sequence of caller type names to restrict check.
- methods (Optional[String]): Optional sequence of method names to restrict check.
-
- Returns:
- bool: true if the node represents a method call for the given type and
- method names, False otherwise.
- """
- return (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)
- and (func.bound.name in types if types else True)
- and (func.name in methods if methods else True))
-
-
-class LoggingChecker(checkers.BaseChecker):
- """Checks use of the logging module."""
-
- __implements__ = interfaces.IAstroidChecker
- name = 'logging'
- msgs = MSGS
-
- options = (('logging-modules',
- {'default': ('logging',),
- 'type': 'csv',
- 'metavar': '',
- 'help': 'Logging modules to check that the string format '
- 'arguments are in logging function parameter format'}
- ),
- )
-
- def visit_module(self, node): # pylint: disable=unused-argument
- """Clears any state left in this checker from last module checked."""
- # The code being checked can just as easily "import logging as foo",
- # so it is necessary to process the imports and store in this field
- # what name the logging module is actually given.
- self._logging_names = set()
- logging_mods = self.config.logging_modules
-
- self._logging_modules = set(logging_mods)
- self._from_imports = {}
- for logging_mod in logging_mods:
- parts = logging_mod.rsplit('.', 1)
- if len(parts) > 1:
- self._from_imports[parts[0]] = parts[1]
-
- def visit_importfrom(self, node):
- """Checks to see if a module uses a non-Python logging module."""
- try:
- logging_name = self._from_imports[node.modname]
- for module, as_name in node.names:
- if module == logging_name:
- self._logging_names.add(as_name or module)
- except KeyError:
- pass
-
- def visit_import(self, node):
- """Checks to see if this module uses Python's built-in logging."""
- for module, as_name in node.names:
- if module in self._logging_modules:
- self._logging_names.add(as_name or module)
-
- @check_messages(*(MSGS.keys()))
- def visit_call(self, node):
- """Checks calls to logging methods."""
- def is_logging_name():
- return (isinstance(node.func, astroid.Attribute) and
- isinstance(node.func.expr, astroid.Name) and
- node.func.expr.name in self._logging_names)
-
- def is_logger_class():
- try:
- for inferred in node.func.infer():
- if isinstance(inferred, astroid.BoundMethod):
- parent = inferred._proxied.parent
- if (isinstance(parent, astroid.ClassDef) and
- (parent.qname() == 'logging.Logger' or
- any(ancestor.qname() == 'logging.Logger'
- for ancestor in parent.ancestors()))):
- return True, inferred._proxied.name
- except astroid.exceptions.InferenceError:
- pass
- return False, None
-
- if is_logging_name():
- name = node.func.attrname
- else:
- result, name = is_logger_class()
- if not result:
- return
- self._check_log_method(node, name)
-
- def _check_log_method(self, node, name):
- """Checks calls to logging.log(level, format, *format_args)."""
- if name == 'log':
- if node.starargs or node.kwargs or len(node.args) < 2:
- # Either a malformed call, star args, or double-star args. Beyond
- # the scope of this checker.
- return
- format_pos = 1
- elif name in CHECKED_CONVENIENCE_FUNCTIONS:
- if node.starargs or node.kwargs or not node.args:
- # Either no args, star args, or double-star args. Beyond the
- # scope of this checker.
- return
- format_pos = 0
- else:
- return
-
- if isinstance(node.args[format_pos], astroid.BinOp) and node.args[format_pos].op == '%':
- self.add_message('logging-not-lazy', node=node)
- elif isinstance(node.args[format_pos], astroid.Call):
- self._check_call_func(node.args[format_pos])
- elif isinstance(node.args[format_pos], astroid.Const):
- self._check_format_string(node, format_pos)
-
- def _check_call_func(self, node):
- """Checks that function call is not format_string.format().
-
- Args:
- node (astroid.node_classes.CallFunc):
- CallFunc AST node to be checked.
- """
- func = utils.safe_infer(node.func)
- types = ('str', 'unicode')
- methods = ('format',)
- if is_method_call(func, types, methods) and not is_complex_format_str(func.bound):
- self.add_message('logging-format-interpolation', node=node)
-
- def _check_format_string(self, node, format_arg):
- """Checks that format string tokens match the supplied arguments.
-
- Args:
- node (astroid.node_classes.NodeNG): AST node to be checked.
- format_arg (int): Index of the format string in the node arguments.
- """
- num_args = _count_supplied_tokens(node.args[format_arg + 1:])
- if not num_args:
- # If no args were supplied, then all format strings are valid -
- # don't check any further.
- return
- format_string = node.args[format_arg].value
- if not isinstance(format_string, six.string_types):
- # If the log format is constant non-string (e.g. logging.debug(5)),
- # ensure there are no arguments.
- required_num_args = 0
- else:
- try:
- keyword_args, required_num_args = \
- utils.parse_format_string(format_string)
- if keyword_args:
- # Keyword checking on logging strings is complicated by
- # special keywords - out of scope.
- return
- except utils.UnsupportedFormatCharacter as ex:
- char = format_string[ex.index]
- self.add_message('logging-unsupported-format', node=node,
- args=(char, ord(char), ex.index))
- return
- except utils.IncompleteFormatString:
- self.add_message('logging-format-truncated', node=node)
- return
- if num_args > required_num_args:
- self.add_message('logging-too-many-args', node=node)
- elif num_args < required_num_args:
- self.add_message('logging-too-few-args', node=node)
-
-
-def is_complex_format_str(node):
- """Checks if node represents a string with complex formatting specs.
-
- Args:
- node (astroid.node_classes.NodeNG): AST node to check
- Returns:
- bool: True if inferred string uses complex formatting, False otherwise
- """
- inferred = utils.safe_infer(node)
- if inferred is None or not isinstance(inferred.value, six.string_types):
- return True
- for _, _, format_spec, _ in string.Formatter().parse(inferred.value):
- if format_spec:
- return True
- return False
-
-
-def _count_supplied_tokens(args):
- """Counts the number of tokens in an args list.
-
- The Python log functions allow for special keyword arguments: func,
- exc_info and extra. To handle these cases correctly, we only count
- arguments that aren't keywords.
-
- Args:
- args (list): AST nodes that are arguments for a log format string.
-
- Returns:
- int: Number of AST nodes that aren't keywords.
- """
- return sum(1 for arg in args if not isinstance(arg, astroid.Keyword))
-
-
-def register(linter):
- """Required method to auto-register this checker."""
- linter.register_checker(LoggingChecker(linter))
diff --git a/pymode/libs/pylint/checkers/misc.py b/pymode/libs/pylint/checkers/misc.py
deleted file mode 100644
index 104f0dfc..00000000
--- a/pymode/libs/pylint/checkers/misc.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014 Alexandru Coman
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-
-"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
-
-# pylint: disable=W0511
-
-import re
-
-import six
-
-from pylint.interfaces import IRawChecker
-from pylint.checkers import BaseChecker
-
-
-MSGS = {
- 'W0511': ('%s',
- 'fixme',
- 'Used when a warning note as FIXME or XXX is detected.'),
- 'W0512': ('Cannot decode using encoding "%s", unexpected byte at position %d',
- 'invalid-encoded-data',
- 'Used when a source line cannot be decoded using the specified '
- 'source file encoding.',
- {'maxversion': (3, 0)}),
-}
-
-
-class EncodingChecker(BaseChecker):
-
- """checks for:
- * warning notes in the code like FIXME, XXX
- * encoding issues.
- """
- __implements__ = IRawChecker
-
- # configuration section name
- name = 'miscellaneous'
- msgs = MSGS
-
- options = (('notes',
- {'type': 'csv', 'metavar': '',
- 'default': ('FIXME', 'XXX', 'TODO'),
- 'help': ('List of note tags to take in consideration, '
- 'separated by a comma.')}),)
-
- def _check_note(self, notes, lineno, line):
- # First, simply check if the notes are in the line at all. This is an
- # optimisation to prevent using the regular expression on every line,
- # but rather only on lines which may actually contain one of the notes.
- # This prevents a pathological problem with lines that are hundreds
- # of thousands of characters long.
- for note in self.config.notes:
- if note in line:
- break
- else:
- return
-
- match = notes.search(line)
- if not match:
- return
- self.add_message('fixme', args=line[match.start(1):].rstrip(), line=lineno)
-
- def _check_encoding(self, lineno, line, file_encoding):
- try:
- return six.text_type(line, file_encoding)
- except UnicodeDecodeError as ex:
- self.add_message('invalid-encoded-data', line=lineno,
- args=(file_encoding, ex.args[2]))
-
- def process_module(self, module):
- """inspect the source file to find encoding problem or fixmes like
- notes
- """
- if self.config.notes:
- notes = re.compile(
- r'.*?#\s*(%s)(:*\s*.*)' % "|".join(self.config.notes))
- else:
- notes = None
- if module.file_encoding:
- encoding = module.file_encoding
- else:
- encoding = 'ascii'
-
- with module.stream() as stream:
- for lineno, line in enumerate(stream):
- line = self._check_encoding(lineno + 1, line, encoding)
- if line is not None and notes:
- self._check_note(notes, lineno + 1, line)
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(EncodingChecker(linter))
diff --git a/pymode/libs/pylint/checkers/newstyle.py b/pymode/libs/pylint/checkers/newstyle.py
deleted file mode 100644
index 6071ea5c..00000000
--- a/pymode/libs/pylint/checkers/newstyle.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""check for new / old style related problems
-"""
-import sys
-
-import astroid
-
-from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (
- check_messages,
- node_frame_class,
- has_known_bases
-)
-
-MSGS = {
- 'E1001': ('Use of __slots__ on an old style class',
- 'slots-on-old-class',
- 'Used when an old style class uses the __slots__ attribute.',
- {'maxversion': (3, 0)}),
- 'E1002': ('Use of super on an old style class',
- 'super-on-old-class',
- 'Used when an old style class uses the super builtin.',
- {'maxversion': (3, 0)}),
- 'E1003': ('Bad first argument %r given to super()',
- 'bad-super-call',
- 'Used when another argument than the current class is given as \
- first argument of the super builtin.'),
- 'E1004': ('Missing argument to super()',
- 'missing-super-argument',
- 'Used when the super builtin didn\'t receive an \
- argument.',
- {'maxversion': (3, 0)}),
- 'W1001': ('Use of "property" on an old style class',
- 'property-on-old-class',
- 'Used when Pylint detect the use of the builtin "property" \
- on an old style class while this is relying on new style \
- classes features.',
- {'maxversion': (3, 0)}),
- 'C1001': ('Old-style class defined.',
- 'old-style-class',
- 'Used when a class is defined that does not inherit from another '
- 'class and does not inherit explicitly from "object".',
- {'maxversion': (3, 0)})
- }
-
-
-class NewStyleConflictChecker(BaseChecker):
- """checks for usage of new style capabilities on old style classes and
- other new/old styles conflicts problems
- * use of property, __slots__, super
- * "super" usage
- """
-
- __implements__ = (IAstroidChecker,)
-
- # configuration section name
- name = 'newstyle'
- # messages
- msgs = MSGS
- priority = -2
- # configuration options
- options = ()
-
- @check_messages('slots-on-old-class', 'old-style-class')
- def visit_classdef(self, node):
- """ Check __slots__ in old style classes and old
- style class definition.
- """
- if '__slots__' in node and not node.newstyle:
- confidence = (INFERENCE if has_known_bases(node)
- else INFERENCE_FAILURE)
- self.add_message('slots-on-old-class', node=node,
- confidence=confidence)
- # The node type could be class, exception, metaclass, or
- # interface. Presumably, the non-class-type nodes would always
- # have an explicit base class anyway.
- if not node.bases and node.type == 'class' and not node.metaclass():
- # We use confidence HIGH here because this message should only ever
- # be emitted for classes at the root of the inheritance hierarchyself.
- self.add_message('old-style-class', node=node, confidence=HIGH)
-
- @check_messages('property-on-old-class')
- def visit_call(self, node):
- """check property usage"""
- parent = node.parent.frame()
- if (isinstance(parent, astroid.ClassDef) and
- not parent.newstyle and
- isinstance(node.func, astroid.Name)):
- confidence = (INFERENCE if has_known_bases(parent)
- else INFERENCE_FAILURE)
- name = node.func.name
- if name == 'property':
- self.add_message('property-on-old-class', node=node,
- confidence=confidence)
-
- @check_messages('super-on-old-class', 'bad-super-call', 'missing-super-argument')
- def visit_functiondef(self, node):
- """check use of super"""
- # ignore actual functions or method within a new style class
- if not node.is_method():
- return
- klass = node.parent.frame()
- for stmt in node.nodes_of_class(astroid.Call):
- if node_frame_class(stmt) != node_frame_class(node):
- # Don't look down in other scopes.
- continue
-
- expr = stmt.func
- if not isinstance(expr, astroid.Attribute):
- continue
-
- call = expr.expr
- # skip the test if using super
- if not (isinstance(call, astroid.Call) and
- isinstance(call.func, astroid.Name) and
- call.func.name == 'super'):
- continue
-
- if not klass.newstyle and has_known_bases(klass):
- # super should not be used on an old style class
- self.add_message('super-on-old-class', node=node)
- else:
- # super first arg should be the class
- if not call.args:
- if sys.version_info[0] == 3:
- # unless Python 3
- continue
- else:
- self.add_message('missing-super-argument', node=call)
- continue
-
- # calling super(type(self), self) can lead to recursion loop
- # in derived classes
- arg0 = call.args[0]
- if isinstance(arg0, astroid.Call) and \
- isinstance(arg0.func, astroid.Name) and \
- arg0.func.name == 'type':
- self.add_message('bad-super-call', node=call, args=('type', ))
- continue
-
- # calling super(self.__class__, self) can lead to recursion loop
- # in derived classes
- if len(call.args) >= 2 and \
- isinstance(call.args[1], astroid.Name) and \
- call.args[1].name == 'self' and \
- isinstance(arg0, astroid.Attribute) and \
- arg0.attrname == '__class__':
- self.add_message('bad-super-call', node=call, args=('self.__class__', ))
- continue
-
- try:
- supcls = call.args and next(call.args[0].infer(), None)
- except astroid.InferenceError:
- continue
-
- if klass is not supcls:
- name = None
- # if supcls is not YES, then supcls was infered
- # and use its name. Otherwise, try to look
- # for call.args[0].name
- if supcls:
- name = supcls.name
- elif call.args and hasattr(call.args[0], 'name'):
- name = call.args[0].name
- if name:
- self.add_message('bad-super-call', node=call, args=(name, ))
-
- visit_asyncfunctiondef = visit_functiondef
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(NewStyleConflictChecker(linter))
diff --git a/pymode/libs/pylint/checkers/python3.py b/pymode/libs/pylint/checkers/python3.py
deleted file mode 100644
index 971072ee..00000000
--- a/pymode/libs/pylint/checkers/python3.py
+++ /dev/null
@@ -1,861 +0,0 @@
-# Copyright (c) 2014-2015 Brett Cannon
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015 Pavel Roskin
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Check Python 2 code for Python 2/3 source-compatible issues."""
-from __future__ import absolute_import, print_function
-
-import re
-import sys
-import tokenize
-
-from collections import namedtuple
-
-import six
-
-import astroid
-from astroid import bases
-
-from pylint import checkers, interfaces
-from pylint.interfaces import INFERENCE_FAILURE, INFERENCE
-from pylint.utils import WarningScope
-from pylint.checkers import utils
-
-
-_ZERO = re.compile("^0+$")
-
-def _is_old_octal(literal):
- if _ZERO.match(literal):
- return False
- if re.match(r'0\d+', literal):
- try:
- int(literal, 8)
- except ValueError:
- return False
- return True
-
-def _check_dict_node(node):
- inferred_types = set()
- try:
- inferred = node.infer()
- for inferred_node in inferred:
- inferred_types.add(inferred_node)
- except astroid.InferenceError:
- pass
- return (not inferred_types
- or any(isinstance(x, astroid.Dict) for x in inferred_types))
-
-def _is_builtin(node):
- return getattr(node, 'name', None) in ('__builtin__', 'builtins')
-
-_ACCEPTS_ITERATOR = {'iter', 'list', 'tuple', 'sorted', 'set', 'sum', 'any',
- 'all', 'enumerate', 'dict'}
-
-def _in_iterating_context(node):
- """Check if the node is being used as an iterator.
-
- Definition is taken from lib2to3.fixer_util.in_special_context().
- """
- parent = node.parent
- # Since a call can't be the loop variant we only need to know if the node's
- # parent is a 'for' loop to know it's being used as the iterator for the
- # loop.
- if isinstance(parent, astroid.For):
- return True
- # Need to make sure the use of the node is in the iterator part of the
- # comprehension.
- elif isinstance(parent, astroid.Comprehension):
- if parent.iter == node:
- return True
- # Various built-ins can take in an iterable or list and lead to the same
- # value.
- elif isinstance(parent, astroid.Call):
- if isinstance(parent.func, astroid.Name):
- parent_scope = parent.func.lookup(parent.func.name)[0]
- if _is_builtin(parent_scope) and parent.func.name in _ACCEPTS_ITERATOR:
- return True
- elif isinstance(parent.func, astroid.Attribute):
- if parent.func.attrname == 'join':
- return True
- # If the call is in an unpacking, there's no need to warn,
- # since it can be considered iterating.
- elif (isinstance(parent, astroid.Assign) and
- isinstance(parent.targets[0], (astroid.List, astroid.Tuple))):
- if len(parent.targets[0].elts) > 1:
- return True
- return False
-
-
-def _is_conditional_import(node):
- """Checks if a import node is in the context of a conditional.
- """
- parent = node.parent
- return isinstance(parent, (astroid.TryExcept, astroid.ExceptHandler,
- astroid.If, astroid.IfExp))
-
-Branch = namedtuple('Branch', ['node', 'is_py2_only'])
-
-class Python3Checker(checkers.BaseChecker):
-
- __implements__ = interfaces.IAstroidChecker
- enabled = False
- name = 'python3'
-
- msgs = {
- # Errors for what will syntactically break in Python 3, warnings for
- # everything else.
- 'E1601': ('print statement used',
- 'print-statement',
- 'Used when a print statement is used '
- '(`print` is a function in Python 3)',
- {'maxversion': (3, 0)}),
- 'E1602': ('Parameter unpacking specified',
- 'parameter-unpacking',
- 'Used when parameter unpacking is specified for a function'
- "(Python 3 doesn't allow it)",
- {'maxversion': (3, 0)}),
- 'E1603': ('Implicit unpacking of exceptions is not supported '
- 'in Python 3',
- 'unpacking-in-except',
- 'Python3 will not allow implicit unpacking of '
- 'exceptions in except clauses. '
- 'See http://www.python.org/dev/peps/pep-3110/',
- {'maxversion': (3, 0),
- 'old_names': [('W0712', 'unpacking-in-except')]}),
- 'E1604': ('Use raise ErrorClass(args) instead of '
- 'raise ErrorClass, args.',
- 'old-raise-syntax',
- "Used when the alternate raise syntax "
- "'raise foo, bar' is used "
- "instead of 'raise foo(bar)'.",
- {'maxversion': (3, 0),
- 'old_names': [('W0121', 'old-raise-syntax')]}),
- 'E1605': ('Use of the `` operator',
- 'backtick',
- 'Used when the deprecated "``" (backtick) operator is used '
- 'instead of the str() function.',
- {'scope': WarningScope.NODE,
- 'maxversion': (3, 0),
- 'old_names': [('W0333', 'backtick')]}),
- 'E1609': ('Import * only allowed at module level',
- 'import-star-module-level',
- 'Used when the import star syntax is used somewhere '
- 'else than the module level.',
- {'maxversion': (3, 0)}),
- 'W1601': ('apply built-in referenced',
- 'apply-builtin',
- 'Used when the apply built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1602': ('basestring built-in referenced',
- 'basestring-builtin',
- 'Used when the basestring built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1603': ('buffer built-in referenced',
- 'buffer-builtin',
- 'Used when the buffer built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1604': ('cmp built-in referenced',
- 'cmp-builtin',
- 'Used when the cmp built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1605': ('coerce built-in referenced',
- 'coerce-builtin',
- 'Used when the coerce built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1606': ('execfile built-in referenced',
- 'execfile-builtin',
- 'Used when the execfile built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1607': ('file built-in referenced',
- 'file-builtin',
- 'Used when the file built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1608': ('long built-in referenced',
- 'long-builtin',
- 'Used when the long built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1609': ('raw_input built-in referenced',
- 'raw_input-builtin',
- 'Used when the raw_input built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1610': ('reduce built-in referenced',
- 'reduce-builtin',
- 'Used when the reduce built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1611': ('StandardError built-in referenced',
- 'standarderror-builtin',
- 'Used when the StandardError built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1612': ('unicode built-in referenced',
- 'unicode-builtin',
- 'Used when the unicode built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1613': ('xrange built-in referenced',
- 'xrange-builtin',
- 'Used when the xrange built-in function is referenced '
- '(missing from Python 3)',
- {'maxversion': (3, 0)}),
- 'W1614': ('__coerce__ method defined',
- 'coerce-method',
- 'Used when a __coerce__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1615': ('__delslice__ method defined',
- 'delslice-method',
- 'Used when a __delslice__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1616': ('__getslice__ method defined',
- 'getslice-method',
- 'Used when a __getslice__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1617': ('__setslice__ method defined',
- 'setslice-method',
- 'Used when a __setslice__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1618': ('import missing `from __future__ import absolute_import`',
- 'no-absolute-import',
- 'Used when an import is not accompanied by '
- '``from __future__ import absolute_import`` '
- '(default behaviour in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1619': ('division w/o __future__ statement',
- 'old-division',
- 'Used for non-floor division w/o a float literal or '
- '``from __future__ import division`` '
- '(Python 3 returns a float for int division unconditionally)',
- {'maxversion': (3, 0)}),
- 'W1620': ('Calling a dict.iter*() method',
- 'dict-iter-method',
- 'Used for calls to dict.iterkeys(), itervalues() or iteritems() '
- '(Python 3 lacks these methods)',
- {'maxversion': (3, 0)}),
- 'W1621': ('Calling a dict.view*() method',
- 'dict-view-method',
- 'Used for calls to dict.viewkeys(), viewvalues() or viewitems() '
- '(Python 3 lacks these methods)',
- {'maxversion': (3, 0)}),
- 'W1622': ('Called a next() method on an object',
- 'next-method-called',
- "Used when an object's next() method is called "
- '(Python 3 uses the next() built-in function)',
- {'maxversion': (3, 0)}),
- 'W1623': ("Assigning to a class's __metaclass__ attribute",
- 'metaclass-assignment',
- "Used when a metaclass is specified by assigning to __metaclass__ "
- '(Python 3 specifies the metaclass as a class statement argument)',
- {'maxversion': (3, 0)}),
- 'W1624': ('Indexing exceptions will not work on Python 3',
- 'indexing-exception',
- 'Indexing exceptions will not work on Python 3. Use '
- '`exception.args[index]` instead.',
- {'maxversion': (3, 0),
- 'old_names': [('W0713', 'indexing-exception')]}),
- 'W1625': ('Raising a string exception',
- 'raising-string',
- 'Used when a string exception is raised. This will not '
- 'work on Python 3.',
- {'maxversion': (3, 0),
- 'old_names': [('W0701', 'raising-string')]}),
- 'W1626': ('reload built-in referenced',
- 'reload-builtin',
- 'Used when the reload built-in function is referenced '
- '(missing from Python 3). You can use instead imp.reload '
- 'or importlib.reload.',
- {'maxversion': (3, 0)}),
- 'W1627': ('__oct__ method defined',
- 'oct-method',
- 'Used when a __oct__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1628': ('__hex__ method defined',
- 'hex-method',
- 'Used when a __hex__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1629': ('__nonzero__ method defined',
- 'nonzero-method',
- 'Used when a __nonzero__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1630': ('__cmp__ method defined',
- 'cmp-method',
- 'Used when a __cmp__ method is defined '
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- # 'W1631': replaced by W1636
- 'W1632': ('input built-in referenced',
- 'input-builtin',
- 'Used when the input built-in is referenced '
- '(backwards-incompatible semantics in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1633': ('round built-in referenced',
- 'round-builtin',
- 'Used when the round built-in is referenced '
- '(backwards-incompatible semantics in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1634': ('intern built-in referenced',
- 'intern-builtin',
- 'Used when the intern built-in is referenced '
- '(Moved to sys.intern in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1635': ('unichr built-in referenced',
- 'unichr-builtin',
- 'Used when the unichr built-in is referenced '
- '(Use chr in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1636': ('map built-in referenced when not iterating',
- 'map-builtin-not-iterating',
- 'Used when the map built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',
- {'maxversion': (3, 0),
- 'old_names': [('W1631', 'implicit-map-evaluation')]}),
- 'W1637': ('zip built-in referenced when not iterating',
- 'zip-builtin-not-iterating',
- 'Used when the zip built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1638': ('range built-in referenced when not iterating',
- 'range-builtin-not-iterating',
- 'Used when the range built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1639': ('filter built-in referenced when not iterating',
- 'filter-builtin-not-iterating',
- 'Used when the filter built-in is referenced in a non-iterating '
- 'context (returns an iterator in Python 3)',
- {'maxversion': (3, 0)}),
- 'W1640': ('Using the cmp argument for list.sort / sorted',
- 'using-cmp-argument',
- 'Using the cmp argument for list.sort or the sorted '
- 'builtin should be avoided, since it was removed in '
- 'Python 3. Using either `key` or `functools.cmp_to_key` '
- 'should be preferred.',
- {'maxversion': (3, 0)}),
- 'W1641': ('Implementing __eq__ without also implementing __hash__',
- 'eq-without-hash',
- 'Used when a class implements __eq__ but not __hash__. In Python 2, objects '
- 'get object.__hash__ as the default implementation, in Python 3 objects get '
- 'None as their default __hash__ implementation if they also implement __eq__.',
- {'maxversion': (3, 0)}),
- 'W1642': ('__div__ method defined',
- 'div-method',
- 'Used when a __div__ method is defined. Using `__truediv__` and setting'
- '__div__ = __truediv__ should be preferred.'
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1643': ('__idiv__ method defined',
- 'idiv-method',
- 'Used when a __idiv__ method is defined. Using `__itruediv__` and setting'
- '__idiv__ = __itruediv__ should be preferred.'
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1644': ('__rdiv__ method defined',
- 'rdiv-method',
- 'Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting'
- '__rdiv__ = __rtruediv__ should be preferred.'
- '(method is not used by Python 3)',
- {'maxversion': (3, 0)}),
- 'W1645': ('Exception.message removed in Python 3',
- 'exception-message-attribute',
- 'Used when the message attribute is accessed on an Exception. Use '
- 'str(exception) instead.',
- {'maxversion': (3, 0)}),
- 'W1646': ('non-text encoding used in str.decode',
- 'invalid-str-codec',
- 'Used when using str.encode or str.decode with a non-text encoding. Use '
- 'codecs module to handle arbitrary codecs.',
- {'maxversion': (3, 0)}),
- 'W1647': ('sys.maxint removed in Python 3',
- 'sys-max-int',
- 'Used when accessing sys.maxint. Use sys.maxsize instead.',
- {'maxversion': (3, 0)}),
- 'W1648': ('Module moved in Python 3',
- 'bad-python3-import',
- 'Used when importing a module that no longer exists in Python 3.',
- {'maxversion': (3, 0)}),
- 'W1649': ('Accessing a function method on the string module',
- 'deprecated-string-function',
- 'Used when accessing a string function that has been deprecated in Python 3.',
- {'maxversion': (3, 0)}),
- 'W1650': ('Using str.translate with deprecated deletechars parameters',
- 'deprecated-str-translate-call',
- 'Used when using the deprecated deletechars parameters from str.translate. Use'
- 're.sub to remove the desired characters ',
- {'maxversion': (3, 0)}),
- }
-
- _bad_builtins = frozenset([
- 'apply',
- 'basestring',
- 'buffer',
- 'cmp',
- 'coerce',
- 'execfile',
- 'file',
- 'input', # Not missing, but incompatible semantics
- 'intern',
- 'long',
- 'raw_input',
- 'reduce',
- 'round', # Not missing, but incompatible semantics
- 'StandardError',
- 'unichr',
- 'unicode',
- 'xrange',
- 'reload',
- ])
-
- _unused_magic_methods = frozenset([
- '__coerce__',
- '__delslice__',
- '__getslice__',
- '__setslice__',
- '__oct__',
- '__hex__',
- '__nonzero__',
- '__cmp__',
- '__div__',
- '__idiv__',
- '__rdiv__',
- ])
-
- _invalid_encodings = frozenset([
- 'base64_codec',
- 'base64',
- 'base_64',
- 'bz2_codec',
- 'bz2',
- 'hex_codec',
- 'hex',
- 'quopri_codec',
- 'quopri',
- 'quotedprintable',
- 'quoted_printable',
- 'uu_codec',
- 'uu',
- 'zlib_codec',
- 'zlib',
- 'zip',
- 'rot13',
- 'rot_13',
- ])
-
- _bad_python3_module_map = {
- 'sys-max-int': {
- 'sys': frozenset(['maxint'])
- },
- 'bad-python3-import': frozenset([
- 'anydbm', 'BaseHTTPServer', '__builtin__', 'CGIHTTPServer', 'ConfigParser', 'copy_reg',
- 'cPickle', 'cProfile', 'cStringIO', 'Cookie', 'cookielib', 'dbhash', 'dbm', 'dumbdbm',
- 'dumbdb', 'Dialog', 'DocXMLRPCServer', 'FileDialog', 'FixTk', 'gdbm', 'htmlentitydefs',
- 'HTMLParser', 'httplib', 'markupbase', 'Queue', 'repr', 'robotparser', 'ScrolledText',
- 'SimpleDialog', 'SimpleHTTPServer', 'SimpleXMLRPCServer', 'StringIO', 'dummy_thread',
- 'SocketServer', 'test.test_support', 'Tkinter', 'Tix', 'Tkconstants', 'tkColorChooser',
- 'tkCommonDialog', 'Tkdnd', 'tkFileDialog', 'tkFont', 'tkMessageBox', 'tkSimpleDialog',
- 'turtle', 'UserList', 'UserString', 'whichdb', '_winreg', 'xmlrpclib', 'audiodev',
- 'Bastion', 'bsddb185', 'bsddb3', 'Canvas', 'cfmfile', 'cl', 'commands', 'compiler',
- 'dircache', 'dl', 'exception', 'fpformat', 'htmllib', 'ihooks', 'imageop', 'imputil',
- 'linuxaudiodev', 'md5', 'mhlib', 'mimetools', 'MimeWriter', 'mimify', 'multifile',
- 'mutex', 'new', 'popen2', 'posixfile', 'pure', 'rexec', 'rfc822', 'sha', 'sgmllib',
- 'sre', 'stat', 'stringold', 'sunaudio', 'sv', 'test.testall', 'thread', 'timing',
- 'toaiff', 'user', 'urllib2', 'urlparse'
- ]),
- 'deprecated-string-function': {
- 'string': frozenset([
- 'maketrans', 'atof', 'atoi', 'atol', 'capitalize', 'expandtabs', 'find', 'rfind',
- 'index', 'rindex', 'count', 'lower', 'split', 'rsplit', 'splitfields', 'join',
- 'joinfields', 'lstrip', 'rstrip', 'strip', 'swapcase', 'translate', 'upper',
- 'ljust', 'rjust', 'center', 'zfill', 'replace'
- ])
- }
- }
-
- if (3, 4) <= sys.version_info < (3, 4, 4):
- # Python 3.4.0 -> 3.4.3 has a bug which breaks `repr_tree()`:
- # https://bugs.python.org/issue23572
- _python_2_tests = frozenset()
- else:
- _python_2_tests = frozenset(
- [astroid.extract_node(x).repr_tree() for x in [
- 'sys.version_info[0] == 2',
- 'sys.version_info[0] < 3',
- 'sys.version_info == (2, 7)',
- 'sys.version_info <= (2, 7)',
- 'sys.version_info < (3, 0)',
- ]])
-
- def __init__(self, *args, **kwargs):
- self._future_division = False
- self._future_absolute_import = False
- self._modules_warned_about = set()
- self._branch_stack = []
- super(Python3Checker, self).__init__(*args, **kwargs)
-
- def add_message(self, msg_id, always_warn=False, # pylint: disable=arguments-differ
- *args, **kwargs):
- if always_warn or not (self._branch_stack and self._branch_stack[-1].is_py2_only):
- super(Python3Checker, self).add_message(msg_id, *args, **kwargs)
-
- def _is_py2_test(self, node):
- if isinstance(node.test, astroid.Attribute) and isinstance(node.test.expr, astroid.Name):
- if node.test.expr.name == 'six' and node.test.attrname == 'PY2':
- return True
- elif (isinstance(node.test, astroid.Compare) and
- node.test.repr_tree() in self._python_2_tests):
- return True
- return False
-
- def visit_if(self, node):
- self._branch_stack.append(Branch(node, self._is_py2_test(node)))
-
- def leave_if(self, node):
- assert self._branch_stack.pop().node == node
-
- def visit_ifexp(self, node):
- self._branch_stack.append(Branch(node, self._is_py2_test(node)))
-
- def leave_ifexp(self, node):
- assert self._branch_stack.pop().node == node
-
- def visit_module(self, node): # pylint: disable=unused-argument
- """Clear checker state after previous module."""
- self._future_division = False
- self._future_absolute_import = False
-
- def visit_functiondef(self, node):
- if node.is_method() and node.name in self._unused_magic_methods:
- method_name = node.name
- if node.name.startswith('__'):
- method_name = node.name[2:-2]
- self.add_message(method_name + '-method', node=node)
-
- @utils.check_messages('parameter-unpacking')
- def visit_arguments(self, node):
- for arg in node.args:
- if isinstance(arg, astroid.Tuple):
- self.add_message('parameter-unpacking', node=arg)
-
- def visit_name(self, node):
- """Detect when a "bad" built-in is referenced."""
- found_node = node.lookup(node.name)[0]
- if _is_builtin(found_node):
- if node.name in self._bad_builtins:
- message = node.name.lower() + '-builtin'
- self.add_message(message, node=node)
-
- @utils.check_messages('print-statement')
- def visit_print(self, node):
- self.add_message('print-statement', node=node, always_warn=True)
-
- def _warn_if_deprecated(self, node, module, attributes, report_on_modules=True):
- for message, module_map in six.iteritems(self._bad_python3_module_map):
- if module in module_map and module not in self._modules_warned_about:
- if isinstance(module_map, frozenset):
- if report_on_modules:
- self._modules_warned_about.add(module)
- self.add_message(message, node=node)
- elif attributes and module_map[module].intersection(attributes):
- self.add_message(message, node=node)
-
- def visit_importfrom(self, node):
- if node.modname == '__future__':
- for name, _ in node.names:
- if name == 'division':
- self._future_division = True
- elif name == 'absolute_import':
- self._future_absolute_import = True
- else:
- if not self._future_absolute_import:
- if self.linter.is_message_enabled('no-absolute-import'):
- self.add_message('no-absolute-import', node=node)
- if not _is_conditional_import(node):
- self._warn_if_deprecated(node, node.modname, {x[0] for x in node.names})
-
- if node.names[0][0] == '*':
- if self.linter.is_message_enabled('import-star-module-level'):
- if not isinstance(node.scope(), astroid.Module):
- self.add_message('import-star-module-level', node=node)
-
- def visit_import(self, node):
- if not self._future_absolute_import:
- self.add_message('no-absolute-import', node=node)
- if not _is_conditional_import(node):
- for name, _ in node.names:
- self._warn_if_deprecated(node, name, None)
-
- @utils.check_messages('metaclass-assignment')
- def visit_classdef(self, node):
- if '__metaclass__' in node.locals:
- self.add_message('metaclass-assignment', node=node)
- locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
- if '__eq__' in locals_and_methods and '__hash__' not in locals_and_methods:
- self.add_message('eq-without-hash', node=node)
-
- @utils.check_messages('old-division')
- def visit_binop(self, node):
- if not self._future_division and node.op == '/':
- for arg in (node.left, node.right):
- if isinstance(arg, astroid.Const) and isinstance(arg.value, float):
- break
- else:
- self.add_message('old-division', node=node)
-
- def _check_cmp_argument(self, node):
- # Check that the `cmp` argument is used
- kwargs = []
- if (isinstance(node.func, astroid.Attribute)
- and node.func.attrname == 'sort'):
- inferred = utils.safe_infer(node.func.expr)
- if not inferred:
- return
-
- builtins_list = "{}.list".format(bases.BUILTINS)
- if (isinstance(inferred, astroid.List)
- or inferred.qname() == builtins_list):
- kwargs = node.keywords
-
- elif (isinstance(node.func, astroid.Name)
- and node.func.name == 'sorted'):
- inferred = utils.safe_infer(node.func)
- if not inferred:
- return
-
- builtins_sorted = "{}.sorted".format(bases.BUILTINS)
- if inferred.qname() == builtins_sorted:
- kwargs = node.keywords
-
- for kwarg in kwargs or []:
- if kwarg.arg == 'cmp':
- self.add_message('using-cmp-argument', node=node)
- return
-
- @staticmethod
- def _is_constant_string_or_name(node):
- if isinstance(node, astroid.Const):
- return isinstance(node.value, six.string_types)
- return isinstance(node, astroid.Name)
-
- @staticmethod
- def _is_none(node):
- return isinstance(node, astroid.Const) and node.value is None
-
- @staticmethod
- def _has_only_n_positional_args(node, number_of_args):
- return len(node.args) == number_of_args and all(node.args) and not node.keywords
-
- @staticmethod
- def _could_be_string(inferred_types):
- confidence = INFERENCE if inferred_types else INFERENCE_FAILURE
- for inferred_type in inferred_types:
- if inferred_type is astroid.Uninferable:
- confidence = INFERENCE_FAILURE
- elif not (isinstance(inferred_type, astroid.Const) and
- isinstance(inferred_type.value, six.string_types)):
- return None
- return confidence
-
- def visit_call(self, node):
- self._check_cmp_argument(node)
-
- if isinstance(node.func, astroid.Attribute):
- inferred_types = set()
- try:
- for inferred_receiver in node.func.expr.infer():
- inferred_types.add(inferred_receiver)
- if isinstance(inferred_receiver, astroid.Module):
- self._warn_if_deprecated(node, inferred_receiver.name,
- {node.func.attrname},
- report_on_modules=False)
- except astroid.InferenceError:
- pass
- if node.args:
- is_str_confidence = self._could_be_string(inferred_types)
- if is_str_confidence:
- if (node.func.attrname in ('encode', 'decode') and
- len(node.args) >= 1 and node.args[0]):
- first_arg = node.args[0]
- self._validate_encoding(first_arg, node)
- if (node.func.attrname == 'translate' and
- self._has_only_n_positional_args(node, 2) and
- self._is_none(node.args[0]) and
- self._is_constant_string_or_name(node.args[1])):
- # The above statement looking for calls of the form:
- #
- # foo.translate(None, 'abc123')
- #
- # or
- #
- # foo.translate(None, some_variable)
- #
- # This check is somewhat broad and _may_ have some false positives, but
- # after checking several large codebases it did not have any false
- # positives while finding several real issues. This call pattern seems
- # rare enough that the trade off is worth it.
- self.add_message('deprecated-str-translate-call',
- node=node,
- confidence=is_str_confidence)
- return
- if node.keywords:
- return
- if node.func.attrname == 'next':
- self.add_message('next-method-called', node=node)
- else:
- if _check_dict_node(node.func.expr):
- if node.func.attrname in ('iterkeys', 'itervalues', 'iteritems'):
- self.add_message('dict-iter-method', node=node)
- elif node.func.attrname in ('viewkeys', 'viewvalues', 'viewitems'):
- self.add_message('dict-view-method', node=node)
- elif isinstance(node.func, astroid.Name):
- found_node = node.func.lookup(node.func.name)[0]
- if _is_builtin(found_node):
- if node.func.name in ('filter', 'map', 'range', 'zip'):
- if not _in_iterating_context(node):
- checker = '{}-builtin-not-iterating'.format(node.func.name)
- self.add_message(checker, node=node)
- if node.func.name == 'open' and node.keywords:
- kwargs = node.keywords
- for kwarg in kwargs or []:
- if kwarg.arg == 'encoding':
- self._validate_encoding(kwarg.value, node)
- break
-
- def _validate_encoding(self, encoding, node):
- if isinstance(encoding, astroid.Const):
- value = encoding.value
- if value in self._invalid_encodings:
- self.add_message('invalid-str-codec',
- node=node)
-
- @utils.check_messages('indexing-exception')
- def visit_subscript(self, node):
- """ Look for indexing exceptions. """
- try:
- for inferred in node.value.infer():
- if not isinstance(inferred, astroid.Instance):
- continue
- if utils.inherit_from_std_ex(inferred):
- self.add_message('indexing-exception', node=node)
- except astroid.InferenceError:
- return
-
- def visit_assignattr(self, node):
- if isinstance(node.assign_type(), astroid.AugAssign):
- self.visit_attribute(node)
-
- def visit_delattr(self, node):
- self.visit_attribute(node)
-
- @utils.check_messages('exception-message-attribute')
- def visit_attribute(self, node):
- """ Look for accessing message on exceptions. """
- try:
- for inferred in node.expr.infer():
- if (isinstance(inferred, astroid.Instance) and
- utils.inherit_from_std_ex(inferred)):
- if node.attrname == 'message':
- self.add_message('exception-message-attribute', node=node)
- if isinstance(inferred, astroid.Module):
- self._warn_if_deprecated(node, inferred.name, {node.attrname},
- report_on_modules=False)
- except astroid.InferenceError:
- return
-
- @utils.check_messages('unpacking-in-except')
- def visit_excepthandler(self, node):
- """Visit an except handler block and check for exception unpacking."""
- if isinstance(node.name, (astroid.Tuple, astroid.List)):
- self.add_message('unpacking-in-except', node=node)
-
- @utils.check_messages('backtick')
- def visit_repr(self, node):
- self.add_message('backtick', node=node)
-
- @utils.check_messages('raising-string', 'old-raise-syntax')
- def visit_raise(self, node):
- """Visit a raise statement and check for raising
- strings or old-raise-syntax.
- """
- if (node.exc is not None and
- node.inst is not None and
- node.tback is None):
- self.add_message('old-raise-syntax', node=node)
-
- # Ignore empty raise.
- if node.exc is None:
- return
- expr = node.exc
- if self._check_raise_value(node, expr):
- return
- else:
- try:
- value = next(astroid.unpack_infer(expr))
- except astroid.InferenceError:
- return
- self._check_raise_value(node, value)
-
- def _check_raise_value(self, node, expr):
- if isinstance(expr, astroid.Const):
- value = expr.value
- if isinstance(value, str):
- self.add_message('raising-string', node=node)
- return True
-
-
-class Python3TokenChecker(checkers.BaseTokenChecker):
- __implements__ = interfaces.ITokenChecker
- name = 'python3'
- enabled = False
-
- msgs = {
- 'E1606': ('Use of long suffix',
- 'long-suffix',
- 'Used when "l" or "L" is used to mark a long integer. '
- 'This will not work in Python 3, since `int` and `long` '
- 'types have merged.',
- {'maxversion': (3, 0)}),
- 'E1607': ('Use of the <> operator',
- 'old-ne-operator',
- 'Used when the deprecated "<>" operator is used instead '
- 'of "!=". This is removed in Python 3.',
- {'maxversion': (3, 0),
- 'old_names': [('W0331', 'old-ne-operator')]}),
- 'E1608': ('Use of old octal literal',
- 'old-octal-literal',
- 'Used when encountering the old octal syntax, '
- 'removed in Python 3. To use the new syntax, '
- 'prepend 0o on the number.',
- {'maxversion': (3, 0)}),
- }
-
- def process_tokens(self, tokens):
- for idx, (tok_type, token, start, _, _) in enumerate(tokens):
- if tok_type == tokenize.NUMBER:
- if token.lower().endswith('l'):
- # This has a different semantic than lowercase-l-suffix.
- self.add_message('long-suffix', line=start[0])
- elif _is_old_octal(token):
- self.add_message('old-octal-literal', line=start[0])
- if tokens[idx][1] == '<>':
- self.add_message('old-ne-operator', line=tokens[idx][2][0])
-
-
-def register(linter):
- linter.register_checker(Python3Checker(linter))
- linter.register_checker(Python3TokenChecker(linter))
diff --git a/pymode/libs/pylint/checkers/raw_metrics.py b/pymode/libs/pylint/checkers/raw_metrics.py
deleted file mode 100644
index 2cceee7e..00000000
--- a/pymode/libs/pylint/checkers/raw_metrics.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
- http://www.logilab.fr/ -- mailto:contact@logilab.fr
-
-Raw metrics checker
-"""
-
-import tokenize
-
-from pylint.interfaces import ITokenChecker
-from pylint.exceptions import EmptyReportError
-from pylint.checkers import BaseTokenChecker
-from pylint.reporters import diff_string
-from pylint.reporters.ureports.nodes import Table
-
-
-def report_raw_stats(sect, stats, old_stats):
- """calculate percentage of code / doc / comment / empty
- """
- total_lines = stats['total_lines']
- if not total_lines:
- raise EmptyReportError()
- sect.description = '%s lines have been analyzed' % total_lines
- lines = ('type', 'number', '%', 'previous', 'difference')
- for node_type in ('code', 'docstring', 'comment', 'empty'):
- key = node_type + '_lines'
- total = stats[key]
- percent = float(total * 100) / total_lines
- old = old_stats.get(key, None)
- if old is not None:
- diff_str = diff_string(old, total)
- else:
- old, diff_str = 'NC', 'NC'
- lines += (node_type, str(total), '%.2f' % percent,
- str(old), diff_str)
- sect.append(Table(children=lines, cols=5, rheaders=1))
-
-
-class RawMetricsChecker(BaseTokenChecker):
- """does not check anything but gives some raw metrics :
- * total number of lines
- * total number of code lines
- * total number of docstring lines
- * total number of comments lines
- * total number of empty lines
- """
-
- __implements__ = (ITokenChecker,)
-
- # configuration section name
- name = 'metrics'
- # configuration options
- options = ()
- # messages
- msgs = {}
- # reports
- reports = (('RP0701', 'Raw metrics', report_raw_stats),)
-
- def __init__(self, linter):
- BaseTokenChecker.__init__(self, linter)
- self.stats = None
-
- def open(self):
- """init statistics"""
- self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
- empty_lines=0, docstring_lines=0,
- comment_lines=0)
-
- def process_tokens(self, tokens):
- """update stats"""
- i = 0
- tokens = list(tokens)
- while i < len(tokens):
- i, lines_number, line_type = get_type(tokens, i)
- self.stats['total_lines'] += lines_number
- self.stats[line_type] += lines_number
-
-
-JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)
-
-def get_type(tokens, start_index):
- """return the line type : docstring, comment, code, empty"""
- i = start_index
- tok_type = tokens[i][0]
- start = tokens[i][2]
- pos = start
- line_type = None
- while i < len(tokens) and tokens[i][2][0] == start[0]:
- tok_type = tokens[i][0]
- pos = tokens[i][3]
- if line_type is None:
- if tok_type == tokenize.STRING:
- line_type = 'docstring_lines'
- elif tok_type == tokenize.COMMENT:
- line_type = 'comment_lines'
- elif tok_type in JUNK:
- pass
- else:
- line_type = 'code_lines'
- i += 1
- if line_type is None:
- line_type = 'empty_lines'
- elif i < len(tokens) and tokens[i][0] == tokenize.NEWLINE:
- i += 1
- return i, pos[0] - start[0] + 1, line_type
-
-
-def register(linter):
- """ required method to auto register this checker """
- linter.register_checker(RawMetricsChecker(linter))
diff --git a/pymode/libs/pylint/checkers/refactoring.py b/pymode/libs/pylint/checkers/refactoring.py
deleted file mode 100644
index 418e63ef..00000000
--- a/pymode/libs/pylint/checkers/refactoring.py
+++ /dev/null
@@ -1,715 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Moisés López
-# Copyright (c) 2016 Claudiu Popa
-# Copyright (c) 2016 Alexander Todorov
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Looks for code which can be refactored."""
-
-import collections
-import itertools
-import tokenize
-
-import astroid
-from astroid import decorators
-import six
-
-from pylint import interfaces
-from pylint import checkers
-from pylint import utils as lint_utils
-from pylint.checkers import utils
-
-
-def _all_elements_are_true(gen):
- values = list(gen)
- return values and all(values)
-
-
-def _if_statement_is_always_returning(if_node):
- def _has_return_node(elems, scope):
- for node in elems:
- if isinstance(node, astroid.If):
- yield _if_statement_is_always_returning(node)
- elif isinstance(node, astroid.Return):
- yield node.scope() is scope
-
- scope = if_node.scope()
- body_returns = _all_elements_are_true(
- _has_return_node(if_node.body, scope=scope)
- )
- if if_node.orelse:
- orelse_returns = _all_elements_are_true(
- _has_return_node(if_node.orelse, scope=scope)
- )
- else:
- orelse_returns = False
-
- return body_returns and orelse_returns
-
-
-class RefactoringChecker(checkers.BaseTokenChecker):
- """Looks for code which can be refactored
-
- This checker also mixes the astroid and the token approaches
- in order to create knowledge about whether a "else if" node
- is a true "else if" node, or a "elif" node.
- """
-
- __implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker)
-
- name = 'refactoring'
-
- msgs = {
- 'R1701': ("Consider merging these isinstance calls to isinstance(%s, (%s))",
- "consider-merging-isinstance",
- "Used when multiple consecutive isinstance calls can be merged into one."),
- 'R1706': ("Consider using ternary (%s if %s else %s)",
- "consider-using-ternary",
- "Used when one of known pre-python 2.5 ternary syntax is used."),
- 'R1702': ('Too many nested blocks (%s/%s)',
- 'too-many-nested-blocks',
- 'Used when a function or a method has too many nested '
- 'blocks. This makes the code less understandable and '
- 'maintainable.',
- {'old_names': [('R0101', 'too-many-nested-blocks')]}),
- 'R1703': ('The if statement can be replaced with %s',
- 'simplifiable-if-statement',
- 'Used when an if statement can be replaced with '
- '\'bool(test)\'. ',
- {'old_names': [('R0102', 'simplifiable-if-statement')]}),
- 'R1704': ('Redefining argument with the local name %r',
- 'redefined-argument-from-local',
- 'Used when a local name is redefining an argument, which might '
- 'suggest a potential error. This is taken in account only for '
- 'a handful of name binding operations, such as for iteration, '
- 'with statement assignment and exception handler assignment.'
- ),
- 'R1705': ('Unnecessary "else" after "return"',
- 'no-else-return',
- 'Used in order to highlight an unnecessary block of '
- 'code following an if containing a return statement. '
- 'As such, it will warn when it encounters an else '
- 'following a chain of ifs, all of them containing a '
- 'return statement.'
- ),
- 'R1707': ('Disallow trailing comma tuple',
- 'trailing-comma-tuple',
- 'In Python, a tuple is actually created by the comma symbol, '
- 'not by the parentheses. Unfortunately, one can actually create a '
- 'tuple by misplacing a trailing comma, which can lead to potential '
- 'weird bugs in your code. You should always use parentheses '
- 'explicitly for creating a tuple.',
- {'minversion': (3, 0)}),
- }
- options = (('max-nested-blocks',
- {'default': 5, 'type': 'int', 'metavar': '',
- 'help': 'Maximum number of nested blocks for function / '
- 'method body'}
- ),)
-
- priority = 0
-
- def __init__(self, linter=None):
- checkers.BaseTokenChecker.__init__(self, linter)
- self._init()
-
- def _init(self):
- self._nested_blocks = []
- self._elifs = []
- self._if_counter = 0
- self._nested_blocks_msg = None
-
- @decorators.cachedproperty
- def _dummy_rgx(self):
- return lint_utils.get_global_option(
- self, 'dummy-variables-rgx', default=None)
-
- @staticmethod
- def _is_bool_const(node):
- return (isinstance(node.value, astroid.Const)
- and isinstance(node.value.value, bool))
-
- def _is_actual_elif(self, node):
- """Check if the given node is an actual elif
-
- This is a problem we're having with the builtin ast module,
- which splits `elif` branches into a separate if statement.
- Unfortunately we need to know the exact type in certain
- cases.
- """
-
- if isinstance(node.parent, astroid.If):
- orelse = node.parent.orelse
- # current if node must directly follow a "else"
- if orelse and orelse == [node]:
- if self._elifs[self._if_counter]:
- return True
- return False
-
- def _check_simplifiable_if(self, node):
- """Check if the given if node can be simplified.
-
- The if statement can be reduced to a boolean expression
- in some cases. For instance, if there are two branches
- and both of them return a boolean value that depends on
- the result of the statement's test, then this can be reduced
- to `bool(test)` without losing any functionality.
- """
-
- if self._is_actual_elif(node):
- # Not interested in if statements with multiple branches.
- return
- if len(node.orelse) != 1 or len(node.body) != 1:
- return
-
- # Check if both branches can be reduced.
- first_branch = node.body[0]
- else_branch = node.orelse[0]
- if isinstance(first_branch, astroid.Return):
- if not isinstance(else_branch, astroid.Return):
- return
- first_branch_is_bool = self._is_bool_const(first_branch)
- else_branch_is_bool = self._is_bool_const(else_branch)
- reduced_to = "'return bool(test)'"
- elif isinstance(first_branch, astroid.Assign):
- if not isinstance(else_branch, astroid.Assign):
- return
- first_branch_is_bool = self._is_bool_const(first_branch)
- else_branch_is_bool = self._is_bool_const(else_branch)
- reduced_to = "'var = bool(test)'"
- else:
- return
-
- if not first_branch_is_bool or not else_branch_is_bool:
- return
- if not first_branch.value.value:
- # This is a case that can't be easily simplified and
- # if it can be simplified, it will usually result in a
- # code that's harder to understand and comprehend.
- # Let's take for instance `arg and arg <= 3`. This could theoretically be
- # reduced to `not arg or arg > 3`, but the net result is that now the
- # condition is harder to understand, because it requires understanding of
- # an extra clause:
- # * first, there is the negation of truthness with `not arg`
- # * the second clause is `arg > 3`, which occurs when arg has a
- # a truth value, but it implies that `arg > 3` is equivalent
- # with `arg and arg > 3`, which means that the user must
- # think about this assumption when evaluating `arg > 3`.
- # The original form is easier to grasp.
- return
-
- self.add_message('simplifiable-if-statement', node=node,
- args=(reduced_to,))
-
- def process_tokens(self, tokens):
- # Process tokens and look for 'if' or 'elif'
- for index, token in enumerate(tokens):
- token_string = token[1]
- if token_string == 'elif':
- self._elifs.append(True)
- elif token_string == 'if':
- self._elifs.append(False)
- elif six.PY3 and token.exact_type == tokenize.COMMA:
- self._check_one_element_trailing_comma_tuple(tokens, token, index)
-
- def _check_one_element_trailing_comma_tuple(self, tokens, token, index):
- left_tokens = itertools.islice(tokens, index + 1, None)
- same_line_remaining_tokens = list(
- other_token for other_token in left_tokens
- if other_token.start[0] == token.start[0]
- )
- is_last_element = all(
- other_token.type in (tokenize.NEWLINE, tokenize.COMMENT)
- for other_token in same_line_remaining_tokens
- )
-
- if not same_line_remaining_tokens or not is_last_element:
- return
-
- assign_token = tokens[index-2:index-1]
- if assign_token and '=' in assign_token[0].string:
- if self.linter.is_message_enabled('trailing-comma-tuple'):
- self.add_message('trailing-comma-tuple',
- line=token.start[0])
-
- def leave_module(self, _):
- self._init()
-
- @utils.check_messages('too-many-nested-blocks')
- def visit_tryexcept(self, node):
- self._check_nested_blocks(node)
-
- visit_tryfinally = visit_tryexcept
- visit_while = visit_tryexcept
-
- def _check_redefined_argument_from_local(self, name_node):
- if self._dummy_rgx and self._dummy_rgx.match(name_node.name):
- return
- if not name_node.lineno:
- # Unknown position, maybe it is a manually built AST?
- return
-
- scope = name_node.scope()
- if not isinstance(scope, astroid.FunctionDef):
- return
-
- for defined_argument in scope.args.nodes_of_class(astroid.AssignName):
- if defined_argument.name == name_node.name:
- self.add_message('redefined-argument-from-local',
- node=name_node,
- args=(name_node.name, ))
-
- @utils.check_messages('redefined-argument-from-local',
- 'too-many-nested-blocks')
- def visit_for(self, node):
- self._check_nested_blocks(node)
-
- for name in node.target.nodes_of_class(astroid.AssignName):
- self._check_redefined_argument_from_local(name)
-
- @utils.check_messages('redefined-argument-from-local')
- def visit_excepthandler(self, node):
- if node.name and isinstance(node.name, astroid.AssignName):
- self._check_redefined_argument_from_local(node.name)
-
- @utils.check_messages('redefined-argument-from-local')
- def visit_with(self, node):
- for _, names in node.items:
- if not names:
- continue
- for name in names.nodes_of_class(astroid.AssignName):
- self._check_redefined_argument_from_local(name)
-
- def visit_ifexp(self, _):
- self._if_counter += 1
-
- def visit_comprehension(self, node):
- self._if_counter += len(node.ifs)
-
- def _check_superfluous_else_return(self, node):
- if not node.orelse:
- # Not interested in if statements without else.
- return
-
- if _if_statement_is_always_returning(node) and not self._is_actual_elif(node):
- self.add_message('no-else-return', node=node)
-
- @utils.check_messages('too-many-nested-blocks', 'simplifiable-if-statement',
- 'no-else-return',)
- def visit_if(self, node):
- self._check_simplifiable_if(node)
- self._check_nested_blocks(node)
- self._check_superfluous_else_return(node)
- self._if_counter += 1
-
- @utils.check_messages('too-many-nested-blocks')
- def leave_functiondef(self, _):
- # check left-over nested blocks stack
- self._emit_nested_blocks_message_if_needed(self._nested_blocks)
- # new scope = reinitialize the stack of nested blocks
- self._nested_blocks = []
-
- def _check_nested_blocks(self, node):
- """Update and check the number of nested blocks
- """
- # only check block levels inside functions or methods
- if not isinstance(node.scope(), astroid.FunctionDef):
- return
- # messages are triggered on leaving the nested block. Here we save the
- # stack in case the current node isn't nested in the previous one
- nested_blocks = self._nested_blocks[:]
- if node.parent == node.scope():
- self._nested_blocks = [node]
- else:
- # go through ancestors from the most nested to the less
- for ancestor_node in reversed(self._nested_blocks):
- if ancestor_node == node.parent:
- break
- self._nested_blocks.pop()
- # if the node is a elif, this should not be another nesting level
- if isinstance(node, astroid.If) and self._elifs[self._if_counter]:
- if self._nested_blocks:
- self._nested_blocks.pop()
- self._nested_blocks.append(node)
-
- # send message only once per group of nested blocks
- if len(nested_blocks) > len(self._nested_blocks):
- self._emit_nested_blocks_message_if_needed(nested_blocks)
-
- def _emit_nested_blocks_message_if_needed(self, nested_blocks):
- if len(nested_blocks) > self.config.max_nested_blocks:
- self.add_message('too-many-nested-blocks', node=nested_blocks[0],
- args=(len(nested_blocks), self.config.max_nested_blocks))
-
- @staticmethod
- def _duplicated_isinstance_types(node):
- """Get the duplicated types from the underlying isinstance calls.
-
- :param astroid.BoolOp node: Node which should contain a bunch of isinstance calls.
- :returns: Dictionary of the comparison objects from the isinstance calls,
- to duplicate values from consecutive calls.
- :rtype: dict
- """
- duplicated_objects = set()
- all_types = collections.defaultdict(set)
-
- for call in node.values:
- if not isinstance(call, astroid.Call) or len(call.args) != 2:
- continue
-
- inferred = utils.safe_infer(call.func)
- if not inferred or not utils.is_builtin_object(inferred):
- continue
-
- if inferred.name != 'isinstance':
- continue
-
- isinstance_object = call.args[0].as_string()
- isinstance_types = call.args[1]
-
- if isinstance_object in all_types:
- duplicated_objects.add(isinstance_object)
-
- if isinstance(isinstance_types, astroid.Tuple):
- elems = [class_type.as_string() for class_type in isinstance_types.itered()]
- else:
- elems = [isinstance_types.as_string()]
- all_types[isinstance_object].update(elems)
-
- # Remove all keys which not duplicated
- return {key: value for key, value in all_types.items()
- if key in duplicated_objects}
-
- @utils.check_messages('consider-merging-isinstance')
- def visit_boolop(self, node):
- '''Check isinstance calls which can be merged together.'''
- if node.op != 'or':
- return
-
- first_args = self._duplicated_isinstance_types(node)
- for duplicated_name, class_names in first_args.items():
- names = sorted(name for name in class_names)
- self.add_message('consider-merging-isinstance',
- node=node,
- args=(duplicated_name, ', '.join(names)))
-
- @utils.check_messages('consider-using-ternary')
- def visit_assign(self, node):
- if self._is_and_or_ternary(node.value):
- cond, truth_value, false_value = self._and_or_ternary_arguments(node.value)
- elif self._is_seq_based_ternary(node.value):
- cond, truth_value, false_value = self._seq_based_ternary_params(node.value)
- else:
- return
-
- self.add_message(
- 'consider-using-ternary', node=node,
- args=(truth_value.as_string(),
- cond.as_string(),
- false_value.as_string()),)
-
- visit_return = visit_assign
-
- @staticmethod
- def _is_and_or_ternary(node):
- """
- Returns true if node is 'condition and true_value else false_value' form.
-
- All of: condition, true_value and false_value should not be a complex boolean expression
- """
- return (isinstance(node, astroid.BoolOp)
- and node.op == 'or' and len(node.values) == 2
- and isinstance(node.values[0], astroid.BoolOp)
- and not isinstance(node.values[1], astroid.BoolOp)
- and node.values[0].op == 'and'
- and not isinstance(node.values[0].values[1], astroid.BoolOp)
- and len(node.values[0].values) == 2)
-
- @staticmethod
- def _and_or_ternary_arguments(node):
- false_value = node.values[1]
- condition, true_value = node.values[0].values
- return condition, true_value, false_value
-
- @staticmethod
- def _is_seq_based_ternary(node):
- """Returns true if node is '[false_value,true_value][condition]' form"""
- return (isinstance(node, astroid.Subscript)
- and isinstance(node.value, (astroid.Tuple, astroid.List))
- and len(node.value.elts) == 2 and isinstance(node.slice, astroid.Index))
-
- @staticmethod
- def _seq_based_ternary_params(node):
- false_value, true_value = node.value.elts
- condition = node.slice.value
- return condition, true_value, false_value
-
-
-class RecommandationChecker(checkers.BaseChecker):
- __implements__ = (interfaces.IAstroidChecker,)
- name = 'refactoring'
- msgs = {'C0200': ('Consider using enumerate instead of iterating with range and len',
- 'consider-using-enumerate',
- 'Emitted when code that iterates with range and len is '
- 'encountered. Such code can be simplified by using the '
- 'enumerate builtin.'),
- 'C0201': ('Consider iterating the dictionary directly instead of calling .keys()',
- 'consider-iterating-dictionary',
- 'Emitted when the keys of a dictionary are iterated through the .keys() '
- 'method. It is enough to just iterate through the dictionary itself, as '
- 'in "for key in dictionary".'),
- }
-
- @staticmethod
- def _is_builtin(node, function):
- inferred = utils.safe_infer(node)
- if not inferred:
- return False
- return utils.is_builtin_object(inferred) and inferred.name == function
-
- @utils.check_messages('consider-iterating-dictionary')
- def visit_call(self, node):
- inferred = utils.safe_infer(node.func)
- if not inferred:
- return
-
- if not isinstance(inferred, astroid.BoundMethod):
- return
- if not isinstance(inferred.bound, astroid.Dict) or inferred.name != 'keys':
- return
-
- if isinstance(node.parent, (astroid.For, astroid.Comprehension)):
- self.add_message('consider-iterating-dictionary', node=node)
-
- @utils.check_messages('consider-using-enumerate')
- def visit_for(self, node):
- """Emit a convention whenever range and len are used for indexing."""
- # Verify that we have a `range(len(...))` call and that the object
- # which is iterated is used as a subscript in the body of the for.
-
- # Is it a proper range call?
- if not isinstance(node.iter, astroid.Call):
- return
- if not self._is_builtin(node.iter.func, 'range'):
- return
- if len(node.iter.args) != 1:
- return
-
- # Is it a proper len call?
- if not isinstance(node.iter.args[0], astroid.Call):
- return
- second_func = node.iter.args[0].func
- if not self._is_builtin(second_func, 'len'):
- return
- len_args = node.iter.args[0].args
- if not len_args or len(len_args) != 1:
- return
- iterating_object = len_args[0]
- if not isinstance(iterating_object, astroid.Name):
- return
-
- # Verify that the body of the for loop uses a subscript
- # with the object that was iterated. This uses some heuristics
- # in order to make sure that the same object is used in the
- # for body.
- for child in node.body:
- for subscript in child.nodes_of_class(astroid.Subscript):
- if not isinstance(subscript.value, astroid.Name):
- continue
- if not isinstance(subscript.slice, astroid.Index):
- continue
- if not isinstance(subscript.slice.value, astroid.Name):
- continue
- if subscript.slice.value.name != node.target.name:
- continue
- if iterating_object.name != subscript.value.name:
- continue
- if subscript.value.scope() != node.scope():
- # Ignore this subscript if it's not in the same
- # scope. This means that in the body of the for
- # loop, another scope was created, where the same
- # name for the iterating object was used.
- continue
- self.add_message('consider-using-enumerate', node=node)
- return
-
-
-class NotChecker(checkers.BaseChecker):
- """checks for too many not in comparison expressions
-
- - "not not" should trigger a warning
- - "not" followed by a comparison should trigger a warning
- """
- __implements__ = (interfaces.IAstroidChecker,)
- msgs = {'C0113': ('Consider changing "%s" to "%s"',
- 'unneeded-not',
- 'Used when a boolean expression contains an unneeded '
- 'negation.'),
- }
- name = 'basic'
- reverse_op = {'<': '>=', '<=': '>', '>': '<=', '>=': '<', '==': '!=',
- '!=': '==', 'in': 'not in', 'is': 'is not'}
- # sets are not ordered, so for example "not set(LEFT_VALS) <= set(RIGHT_VALS)" is
- # not equivalent to "set(LEFT_VALS) > set(RIGHT_VALS)"
- skipped_nodes = (astroid.Set,)
- # 'builtins' py3, '__builtin__' py2
- skipped_classnames = ['%s.%s' % (six.moves.builtins.__name__, qname)
- for qname in ('set', 'frozenset')]
-
- @utils.check_messages('unneeded-not')
- def visit_unaryop(self, node):
- if node.op != 'not':
- return
- operand = node.operand
-
- if isinstance(operand, astroid.UnaryOp) and operand.op == 'not':
- self.add_message('unneeded-not', node=node,
- args=(node.as_string(),
- operand.operand.as_string()))
- elif isinstance(operand, astroid.Compare):
- left = operand.left
- # ignore multiple comparisons
- if len(operand.ops) > 1:
- return
- operator, right = operand.ops[0]
- if operator not in self.reverse_op:
- return
- # Ignore __ne__ as function of __eq__
- frame = node.frame()
- if frame.name == '__ne__' and operator == '==':
- return
- for _type in (utils.node_type(left), utils.node_type(right)):
- if not _type:
- return
- if isinstance(_type, self.skipped_nodes):
- return
- if (isinstance(_type, astroid.Instance) and
- _type.qname() in self.skipped_classnames):
- return
- suggestion = '%s %s %s' % (left.as_string(),
- self.reverse_op[operator],
- right.as_string())
- self.add_message('unneeded-not', node=node,
- args=(node.as_string(), suggestion))
-
-
-def _is_len_call(node):
- """Checks if node is len(SOMETHING)."""
- return (isinstance(node, astroid.Call) and isinstance(node.func, astroid.Name) and
- node.func.name == 'len')
-
-def _is_constant_zero(node):
- return isinstance(node, astroid.Const) and node.value == 0
-
-def _node_is_test_condition(node):
- """ Checks if node is an if, while, assert or if expression statement."""
- return isinstance(node, (astroid.If, astroid.While, astroid.Assert, astroid.IfExp))
-
-
-class LenChecker(checkers.BaseChecker):
- """Checks for incorrect usage of len() inside conditions.
- Pep8 states:
- For sequences, (strings, lists, tuples), use the fact that empty sequences are false.
-
- Yes: if not seq:
- if seq:
-
- No: if len(seq):
- if not len(seq):
-
- Problems detected:
- * if len(sequence):
- * if not len(sequence):
- * if len(sequence) == 0:
- * if len(sequence) != 0:
- * if len(sequence) > 0:
- """
-
- __implements__ = (interfaces.IAstroidChecker,)
-
- # configuration section name
- name = 'len'
- msgs = {'C1801': ('Do not use `len(SEQUENCE)` as condition value',
- 'len-as-condition',
- 'Used when Pylint detects incorrect use of len(sequence) inside '
- 'conditions.'),
- }
-
- priority = -2
- options = ()
-
- @utils.check_messages('len-as-condition')
- def visit_call(self, node):
- # a len(S) call is used inside a test condition
- # could be if, while, assert or if expression statement
- # e.g. `if len(S):`
- if _is_len_call(node):
- # the len() call could also be nested together with other
- # boolean operations, e.g. `if z or len(x):`
- parent = node.parent
- while isinstance(parent, astroid.BoolOp):
- parent = parent.parent
-
- # we're finally out of any nested boolean operations so check if
- # this len() call is part of a test condition
- if not _node_is_test_condition(parent):
- return
- if not (node is parent.test or parent.test.parent_of(node)):
- return
- self.add_message('len-as-condition', node=node)
-
- @utils.check_messages('len-as-condition')
- def visit_unaryop(self, node):
- """`not len(S)` must become `not S` regardless if the parent block
- is a test condition or something else (boolean expression)
- e.g. `if not len(S):`"""
- if isinstance(node, astroid.UnaryOp) and node.op == 'not' and _is_len_call(node.operand):
- self.add_message('len-as-condition', node=node)
-
- @utils.check_messages('len-as-condition')
- def visit_compare(self, node):
- # compare nodes are trickier because the len(S) expression
- # may be somewhere in the middle of the node
-
- # note: astroid.Compare has the left most operand in node.left
- # while the rest are a list of tuples in node.ops
- # the format of the tuple is ('compare operator sign', node)
- # here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
- ops.extend(node.ops)
- ops = list(itertools.chain(*ops))
-
- for ops_idx in range(len(ops) - 2):
- op_1 = ops[ops_idx]
- op_2 = ops[ops_idx + 1]
- op_3 = ops[ops_idx + 2]
- error_detected = False
-
- # 0 ?? len()
- if _is_constant_zero(op_1) and op_2 in ['==', '!=', '<'] and _is_len_call(op_3):
- error_detected = True
- # len() ?? 0
- elif _is_len_call(op_1) and op_2 in ['==', '!=', '>'] and _is_constant_zero(op_3):
- error_detected = True
-
- if error_detected:
- parent = node.parent
- # traverse the AST to figure out if this comparison was part of
- # a test condition
- while parent and not _node_is_test_condition(parent):
- parent = parent.parent
-
- # report only if this len() comparison is part of a test condition
- # for example: return len() > 0 should not report anything
- if _node_is_test_condition(parent):
- self.add_message('len-as-condition', node=node)
-
-
-def register(linter):
- """Required method to auto register this checker."""
- linter.register_checker(RefactoringChecker(linter))
- linter.register_checker(NotChecker(linter))
- linter.register_checker(RecommandationChecker(linter))
- linter.register_checker(LenChecker(linter))
diff --git a/pymode/libs/pylint/checkers/similar.py b/pymode/libs/pylint/checkers/similar.py
deleted file mode 100644
index aa59bfd8..00000000
--- a/pymode/libs/pylint/checkers/similar.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-# pylint: disable=W0622
-"""a similarities / code duplication command line tool and pylint checker
-"""
-
-from __future__ import print_function
-import sys
-from collections import defaultdict
-
-import six
-from six.moves import zip
-
-from pylint.interfaces import IRawChecker
-from pylint.checkers import BaseChecker, table_lines_from_stats
-from pylint.reporters.ureports.nodes import Table
-
-
-class Similar(object):
- """finds copy-pasted lines of code in a project"""
-
- def __init__(self, min_lines=4, ignore_comments=False,
- ignore_docstrings=False, ignore_imports=False):
- self.min_lines = min_lines
- self.ignore_comments = ignore_comments
- self.ignore_docstrings = ignore_docstrings
- self.ignore_imports = ignore_imports
- self.linesets = []
-
- def append_stream(self, streamid, stream, encoding=None):
- """append a file to search for similarities"""
- if encoding is None:
- readlines = stream.readlines
- else:
- readlines = lambda: [line.decode(encoding) for line in stream]
- try:
- self.linesets.append(LineSet(streamid,
- readlines(),
- self.ignore_comments,
- self.ignore_docstrings,
- self.ignore_imports))
- except UnicodeDecodeError:
- pass
-
- def run(self):
- """start looking for similarities and display results on stdout"""
- self._display_sims(self._compute_sims())
-
- def _compute_sims(self):
- """compute similarities in appended files"""
- no_duplicates = defaultdict(list)
- for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
- duplicate = no_duplicates[num]
- for couples in duplicate:
- if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
- couples.add((lineset1, idx1))
- couples.add((lineset2, idx2))
- break
- else:
- duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
- sims = []
- for num, ensembles in six.iteritems(no_duplicates):
- for couples in ensembles:
- sims.append((num, couples))
- sims.sort()
- sims.reverse()
- return sims
-
- def _display_sims(self, sims):
- """display computed similarities on stdout"""
- nb_lignes_dupliquees = 0
- for num, couples in sims:
- print()
- print(num, "similar lines in", len(couples), "files")
- couples = sorted(couples)
- for lineset, idx in couples:
- print("==%s:%s" % (lineset.name, idx))
- # pylint: disable=W0631
- for line in lineset._real_lines[idx:idx+num]:
- print(" ", line.rstrip())
- nb_lignes_dupliquees += num * (len(couples)-1)
- nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
- print("TOTAL lines=%s duplicates=%s percent=%.2f" \
- % (nb_total_lignes, nb_lignes_dupliquees,
- nb_lignes_dupliquees*100. / nb_total_lignes))
-
- def _find_common(self, lineset1, lineset2):
- """find similarities in the two given linesets"""
- lines1 = lineset1.enumerate_stripped
- lines2 = lineset2.enumerate_stripped
- find = lineset2.find
- index1 = 0
- min_lines = self.min_lines
- while index1 < len(lineset1):
- skip = 1
- num = 0
- for index2 in find(lineset1[index1]):
- non_blank = 0
- for num, ((_, line1), (_, line2)) in enumerate(
- zip(lines1(index1), lines2(index2))):
- if line1 != line2:
- if non_blank > min_lines:
- yield num, lineset1, index1, lineset2, index2
- skip = max(skip, num)
- break
- if line1:
- non_blank += 1
- else:
- # we may have reach the end
- num += 1
- if non_blank > min_lines:
- yield num, lineset1, index1, lineset2, index2
- skip = max(skip, num)
- index1 += skip
-
- def _iter_sims(self):
- """iterate on similarities among all files, by making a cartesian
- product
- """
- for idx, lineset in enumerate(self.linesets[:-1]):
- for lineset2 in self.linesets[idx+1:]:
- for sim in self._find_common(lineset, lineset2):
- yield sim
-
-def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
- """return lines with leading/trailing whitespace and any ignored code
- features removed
- """
-
- strippedlines = []
- docstring = None
- for line in lines:
- line = line.strip()
- if ignore_docstrings:
- if not docstring and \
- (line.startswith('"""') or line.startswith("'''")):
- docstring = line[:3]
- line = line[3:]
- if docstring:
- if line.endswith(docstring):
- docstring = None
- line = ''
- if ignore_imports:
- if line.startswith("import ") or line.startswith("from "):
- line = ''
- if ignore_comments:
- # XXX should use regex in checkers/format to avoid cutting
- # at a "#" in a string
- line = line.split('#', 1)[0].strip()
- strippedlines.append(line)
- return strippedlines
-
-
-class LineSet(object):
- """Holds and indexes all the lines of a single source file"""
- def __init__(self, name, lines, ignore_comments=False,
- ignore_docstrings=False, ignore_imports=False):
- self.name = name
- self._real_lines = lines
- self._stripped_lines = stripped_lines(lines, ignore_comments,
- ignore_docstrings,
- ignore_imports)
- self._index = self._mk_index()
-
- def __str__(self):
- return '' % self.name
-
- def __len__(self):
- return len(self._real_lines)
-
- def __getitem__(self, index):
- return self._stripped_lines[index]
-
- def __lt__(self, other):
- return self.name < other.name
-
- def __hash__(self):
- return id(self)
-
- def enumerate_stripped(self, start_at=0):
- """return an iterator on stripped lines, starting from a given index
- if specified, else 0
- """
- idx = start_at
- if start_at:
- lines = self._stripped_lines[start_at:]
- else:
- lines = self._stripped_lines
- for line in lines:
- #if line:
- yield idx, line
- idx += 1
-
- def find(self, stripped_line):
- """return positions of the given stripped line in this set"""
- return self._index.get(stripped_line, ())
-
- def _mk_index(self):
- """create the index for this set"""
- index = defaultdict(list)
- for line_no, line in enumerate(self._stripped_lines):
- if line:
- index[line].append(line_no)
- return index
-
-
-MSGS = {'R0801': ('Similar lines in %s files\n%s',
- 'duplicate-code',
- 'Indicates that a set of similar lines has been detected \
- among multiple file. This usually means that the code should \
- be refactored to avoid this duplication.')}
-
-def report_similarities(sect, stats, old_stats):
- """make a layout with some stats about duplication"""
- lines = ['', 'now', 'previous', 'difference']
- lines += table_lines_from_stats(stats, old_stats,
- ('nb_duplicated_lines',
- 'percent_duplicated_lines'))
- sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
-
-
-# wrapper to get a pylint checker from the similar class
-class SimilarChecker(BaseChecker, Similar):
- """checks for similarities and duplicated code. This computation may be
- memory / CPU intensive, so you should disable it if you experiment some
- problems.
- """
-
- __implements__ = (IRawChecker,)
- # configuration section name
- name = 'similarities'
- # messages
- msgs = MSGS
- # configuration options
- # for available dict keys/values see the optik parser 'add_option' method
- options = (('min-similarity-lines',
- {'default' : 4, 'type' : "int", 'metavar' : '',
- 'help' : 'Minimum lines number of a similarity.'}),
- ('ignore-comments',
- {'default' : True, 'type' : 'yn', 'metavar' : '',
- 'help': 'Ignore comments when computing similarities.'}
- ),
- ('ignore-docstrings',
- {'default' : True, 'type' : 'yn', 'metavar' : '',
- 'help': 'Ignore docstrings when computing similarities.'}
- ),
- ('ignore-imports',
- {'default' : False, 'type' : 'yn', 'metavar' : '',
- 'help': 'Ignore imports when computing similarities.'}
- ),
- )
- # reports
- reports = (('RP0801', 'Duplication', report_similarities),)
-
- def __init__(self, linter=None):
- BaseChecker.__init__(self, linter)
- Similar.__init__(self, min_lines=4,
- ignore_comments=True, ignore_docstrings=True)
- self.stats = None
-
- def set_option(self, optname, value, action=None, optdict=None):
- """method called to set an option (registered in the options list)
-
- overridden to report options setting to Similar
- """
- BaseChecker.set_option(self, optname, value, action, optdict)
- if optname == 'min-similarity-lines':
- self.min_lines = self.config.min_similarity_lines
- elif optname == 'ignore-comments':
- self.ignore_comments = self.config.ignore_comments
- elif optname == 'ignore-docstrings':
- self.ignore_docstrings = self.config.ignore_docstrings
- elif optname == 'ignore-imports':
- self.ignore_imports = self.config.ignore_imports
-
- def open(self):
- """init the checkers: reset linesets and statistics information"""
- self.linesets = []
- self.stats = self.linter.add_stats(nb_duplicated_lines=0,
- percent_duplicated_lines=0)
-
- def process_module(self, node):
- """process a module
-
- the module's content is accessible via the stream object
-
- stream must implement the readlines method
- """
- with node.stream() as stream:
- self.append_stream(self.linter.current_name,
- stream,
- node.file_encoding)
-
- def close(self):
- """compute and display similarities on closing (i.e. end of parsing)"""
- total = sum(len(lineset) for lineset in self.linesets)
- duplicated = 0
- stats = self.stats
- for num, couples in self._compute_sims():
- msg = []
- for lineset, idx in couples:
- msg.append("==%s:%s" % (lineset.name, idx))
- msg.sort()
- # pylint: disable=W0631
- for line in lineset._real_lines[idx:idx+num]:
- msg.append(line.rstrip())
- self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
- duplicated += num * (len(couples) - 1)
- stats['nb_duplicated_lines'] = duplicated
- stats['percent_duplicated_lines'] = total and duplicated * 100. / total
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(SimilarChecker(linter))
-
-def usage(status=0):
- """display command line usage information"""
- print("finds copy pasted blocks in a set of files")
- print()
- print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
-[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
- sys.exit(status)
-
-def Run(argv=None):
- """standalone command line access point"""
- if argv is None:
- argv = sys.argv[1:]
- from getopt import getopt
- s_opts = 'hdi'
- l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
- 'ignore-docstrings')
- min_lines = 4
- ignore_comments = False
- ignore_docstrings = False
- ignore_imports = False
- opts, args = getopt(argv, s_opts, l_opts)
- for opt, val in opts:
- if opt in ('-d', '--duplicates'):
- min_lines = int(val)
- elif opt in ('-h', '--help'):
- usage()
- elif opt in ('-i', '--ignore-comments'):
- ignore_comments = True
- elif opt in ('--ignore-docstrings',):
- ignore_docstrings = True
- elif opt in ('--ignore-imports',):
- ignore_imports = True
- if not args:
- usage(1)
- sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
- for filename in args:
- with open(filename) as stream:
- sim.append_stream(filename, stream)
- sim.run()
- sys.exit(0)
-
-if __name__ == '__main__':
- Run()
diff --git a/pymode/libs/pylint/checkers/spelling.py b/pymode/libs/pylint/checkers/spelling.py
deleted file mode 100644
index 7b0eb8f2..00000000
--- a/pymode/libs/pylint/checkers/spelling.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# Copyright (c) 2014 Michal Nowikowski
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015 Pavel Roskin
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checker for spelling errors in comments and docstrings.
-"""
-
-import os
-import sys
-import tokenize
-import string
-import re
-
-try:
- import enchant
- from enchant.tokenize import get_tokenizer, Filter, EmailFilter, URLFilter, WikiWordFilter
-except ImportError:
- enchant = None
- class Filter:
- def _skip(self, word):
- raise NotImplementedError
-
-import six
-
-from pylint.interfaces import ITokenChecker, IAstroidChecker
-from pylint.checkers import BaseTokenChecker
-from pylint.checkers.utils import check_messages
-
-if sys.version_info[0] >= 3:
- maketrans = str.maketrans
-else:
- maketrans = string.maketrans
-
-if enchant is not None:
- br = enchant.Broker()
- dicts = br.list_dicts()
- dict_choices = [''] + [d[0] for d in dicts]
- dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
- dicts = ", ".join(dicts)
- instr = ""
-else:
- dicts = "none"
- dict_choices = ['']
- instr = " To make it working install python-enchant package."
-
-table = maketrans("", "")
-
-
-class WordsWithDigigtsFilter(Filter):
- """Skips words with digits.
- """
-
- def _skip(self, word):
- for char in word:
- if char.isdigit():
- return True
- return False
-
-
-class WordsWithUnderscores(Filter):
- """Skips words with underscores.
-
- They are probably function parameter names.
- """
- def _skip(self, word):
- return '_' in word
-
-
-class SpellingChecker(BaseTokenChecker):
- """Check spelling in comments and docstrings"""
- __implements__ = (ITokenChecker, IAstroidChecker)
- name = 'spelling'
- msgs = {
- 'C0401': ('Wrong spelling of a word \'%s\' in a comment:\n%s\n'
- '%s\nDid you mean: \'%s\'?',
- 'wrong-spelling-in-comment',
- 'Used when a word in comment is not spelled correctly.'),
- 'C0402': ('Wrong spelling of a word \'%s\' in a docstring:\n%s\n'
- '%s\nDid you mean: \'%s\'?',
- 'wrong-spelling-in-docstring',
- 'Used when a word in docstring is not spelled correctly.'),
- 'C0403': ('Invalid characters %r in a docstring',
- 'invalid-characters-in-docstring',
- 'Used when a word in docstring cannot be checked by enchant.'),
- }
- options = (('spelling-dict',
- {'default' : '', 'type' : 'choice', 'metavar' : '',
- 'choices': dict_choices,
- 'help' : 'Spelling dictionary name. '
- 'Available dictionaries: %s.%s' % (dicts, instr)}),
- ('spelling-ignore-words',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'List of comma separated words that '
- 'should not be checked.'}),
- ('spelling-private-dict-file',
- {'default' : '',
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'A path to a file that contains private '
- 'dictionary; one word per line.'}),
- ('spelling-store-unknown-words',
- {'default' : 'n', 'type' : 'yn', 'metavar' : '',
- 'help' : 'Tells whether to store unknown words to '
- 'indicated private dictionary in '
- '--spelling-private-dict-file option instead of '
- 'raising a message.'}),
- )
-
- def open(self):
- self.initialized = False
- self.private_dict_file = None
-
- if enchant is None:
- return
- dict_name = self.config.spelling_dict
- if not dict_name:
- return
-
- self.ignore_list = [w.strip() for w in self.config.spelling_ignore_words.split(",")]
- # "param" appears in docstring in param description and
- # "pylint" appears in comments in pylint pragmas.
- self.ignore_list.extend(["param", "pylint"])
-
- # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
- if self.config.spelling_private_dict_file:
- self.config.spelling_private_dict_file = os.path.expanduser(
- self.config.spelling_private_dict_file)
-
- if self.config.spelling_private_dict_file:
- self.spelling_dict = enchant.DictWithPWL(
- dict_name, self.config.spelling_private_dict_file)
- self.private_dict_file = open(
- self.config.spelling_private_dict_file, "a")
- else:
- self.spelling_dict = enchant.Dict(dict_name)
-
- if self.config.spelling_store_unknown_words:
- self.unknown_words = set()
-
- # Prepare regex for stripping punctuation signs from text.
- # ' and _ are treated in a special way.
- puncts = string.punctuation.replace("'", "").replace("_", "")
- self.punctuation_regex = re.compile('[%s]' % re.escape(puncts))
- self.tokenizer = get_tokenizer(dict_name, filters=[EmailFilter,
- URLFilter,
- WikiWordFilter,
- WordsWithDigigtsFilter,
- WordsWithUnderscores])
- self.initialized = True
-
- def close(self):
- if self.private_dict_file:
- self.private_dict_file.close()
-
- def _check_spelling(self, msgid, line, line_num):
- for word, _ in self.tokenizer(line.strip()):
- # Skip words from ignore list.
- if word in self.ignore_list:
- continue
-
- orig_word = word
- word = word.lower()
-
- # Strip starting u' from unicode literals and r' from raw strings.
- if (word.startswith("u'") or
- word.startswith('u"') or
- word.startswith("r'") or
- word.startswith('r"')) and len(word) > 2:
- word = word[2:]
-
- # If it is a known word, then continue.
- try:
- if self.spelling_dict.check(word):
- continue
- except enchant.errors.Error:
- # this can only happen in docstrings, not comments
- self.add_message('invalid-characters-in-docstring',
- line=line_num, args=(word,))
- continue
-
- # Store word to private dict or raise a message.
- if self.config.spelling_store_unknown_words:
- if word not in self.unknown_words:
- self.private_dict_file.write("%s\n" % word)
- self.unknown_words.add(word)
- else:
- # Present up to 4 suggestions.
- # TODO: add support for customising this.
- suggestions = self.spelling_dict.suggest(word)[:4]
-
- m = re.search(r"(\W|^)(%s)(\W|$)" % word, line.lower())
- if m:
- # Start position of second group in regex.
- col = m.regs[2][0]
- else:
- col = line.lower().index(word)
- indicator = (" " * col) + ("^" * len(word))
-
- self.add_message(msgid, line=line_num,
- args=(orig_word, line,
- indicator,
- "'{0}'".format("' or '".join(suggestions))))
-
- def process_tokens(self, tokens):
- if not self.initialized:
- return
-
- # Process tokens and look for comments.
- for (tok_type, token, (start_row, _), _, _) in tokens:
- if tok_type == tokenize.COMMENT:
- if start_row == 1 and token.startswith('#!/'):
- # Skip shebang lines
- continue
- if token.startswith('# pylint:'):
- # Skip pylint enable/disable comments
- continue
- self._check_spelling('wrong-spelling-in-comment',
- token, start_row)
-
- @check_messages('wrong-spelling-in-docstring')
- def visit_module(self, node):
- if not self.initialized:
- return
- self._check_docstring(node)
-
- @check_messages('wrong-spelling-in-docstring')
- def visit_classdef(self, node):
- if not self.initialized:
- return
- self._check_docstring(node)
-
- @check_messages('wrong-spelling-in-docstring')
- def visit_functiondef(self, node):
- if not self.initialized:
- return
- self._check_docstring(node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_docstring(self, node):
- """check the node has any spelling errors"""
- docstring = node.doc
- if not docstring:
- return
-
- start_line = node.lineno + 1
- if six.PY2:
- encoding = node.root().file_encoding
- docstring = docstring.decode(encoding or sys.getdefaultencoding(),
- 'replace')
-
- # Go through lines of docstring
- for idx, line in enumerate(docstring.splitlines()):
- self._check_spelling('wrong-spelling-in-docstring',
- line, start_line + idx)
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(SpellingChecker(linter))
diff --git a/pymode/libs/pylint/checkers/stdlib.py b/pymode/libs/pylint/checkers/stdlib.py
deleted file mode 100644
index 38282c27..00000000
--- a/pymode/libs/pylint/checkers/stdlib.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014 Vlad Temian
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015 Cezar
-# Copyright (c) 2015 Chris Rebert
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checkers for various standard library functions."""
-
-import sys
-
-import six
-
-import astroid
-from astroid.bases import Instance
-from pylint.interfaces import IAstroidChecker
-from pylint.checkers import BaseChecker
-from pylint.checkers import utils
-
-
-OPEN_FILES = {'open', 'file'}
-UNITTEST_CASE = 'unittest.case'
-if sys.version_info >= (3, 0):
- OPEN_MODULE = '_io'
-else:
- OPEN_MODULE = '__builtin__'
-
-
-def _check_mode_str(mode):
- # check type
- if not isinstance(mode, six.string_types):
- return False
- # check syntax
- modes = set(mode)
- _mode = "rwatb+U"
- creating = False
- if six.PY3:
- _mode += "x"
- creating = "x" in modes
- if modes - set(_mode) or len(mode) > len(modes):
- return False
- # check logic
- reading = "r" in modes
- writing = "w" in modes
- appending = "a" in modes
- text = "t" in modes
- binary = "b" in modes
- if "U" in modes:
- if writing or appending or creating and six.PY3:
- return False
- reading = True
- if not six.PY3:
- binary = True
- if text and binary:
- return False
- total = reading + writing + appending + (creating if six.PY3 else 0)
- if total > 1:
- return False
- if not (reading or writing or appending or creating and six.PY3):
- return False
- # other 2.x constraints
- if not six.PY3:
- if "U" in mode:
- mode = mode.replace("U", "")
- if "r" not in mode:
- mode = "r" + mode
- return mode[0] in ("r", "w", "a", "U")
- return True
-
-
-class StdlibChecker(BaseChecker):
- __implements__ = (IAstroidChecker,)
- name = 'stdlib'
-
- msgs = {
- 'W1501': ('"%s" is not a valid mode for open.',
- 'bad-open-mode',
- 'Python supports: r, w, a[, x] modes with b, +, '
- 'and U (only with r) options. '
- 'See http://docs.python.org/2/library/functions.html#open'),
- 'W1502': ('Using datetime.time in a boolean context.',
- 'boolean-datetime',
- 'Using datetime.time in a boolean context can hide '
- 'subtle bugs when the time they represent matches '
- 'midnight UTC. This behaviour was fixed in Python 3.5. '
- 'See http://bugs.python.org/issue13936 for reference.',
- {'maxversion': (3, 5)}),
- 'W1503': ('Redundant use of %s with constant '
- 'value %r',
- 'redundant-unittest-assert',
- 'The first argument of assertTrue and assertFalse is '
- 'a condition. If a constant is passed as parameter, that '
- 'condition will be always true. In this case a warning '
- 'should be emitted.'),
- 'W1505': ('Using deprecated method %s()',
- 'deprecated-method',
- 'The method is marked as deprecated and will be removed in '
- 'a future version of Python. Consider looking for an '
- 'alternative in the documentation.'),
- }
-
- deprecated = {
- 0: [
- 'cgi.parse_qs', 'cgi.parse_qsl',
- 'ctypes.c_buffer',
- 'distutils.command.register.register.check_metadata',
- 'distutils.command.sdist.sdist.check_metadata',
- 'tkinter.Misc.tk_menuBar',
- 'tkinter.Menu.tk_bindForTraversal',
- ],
- 2: {
- (2, 6, 0): [
- 'commands.getstatus',
- 'os.popen2',
- 'os.popen3',
- 'os.popen4',
- 'macostools.touched',
- ],
- (2, 7, 0): [
- 'unittest.case.TestCase.assertEquals',
- 'unittest.case.TestCase.assertNotEquals',
- 'unittest.case.TestCase.assertAlmostEquals',
- 'unittest.case.TestCase.assertNotAlmostEquals',
- 'unittest.case.TestCase.assert_',
- 'xml.etree.ElementTree.Element.getchildren',
- 'xml.etree.ElementTree.Element.getiterator',
- 'xml.etree.ElementTree.XMLParser.getiterator',
- 'xml.etree.ElementTree.XMLParser.doctype',
- ],
- },
- 3: {
- (3, 0, 0): [
- 'inspect.getargspec',
- 'unittest.case.TestCase._deprecate.deprecated_func',
- ],
- (3, 1, 0): [
- 'base64.encodestring', 'base64.decodestring',
- 'ntpath.splitunc',
- ],
- (3, 2, 0): [
- 'cgi.escape',
- 'configparser.RawConfigParser.readfp',
- 'xml.etree.ElementTree.Element.getchildren',
- 'xml.etree.ElementTree.Element.getiterator',
- 'xml.etree.ElementTree.XMLParser.getiterator',
- 'xml.etree.ElementTree.XMLParser.doctype',
- ],
- (3, 3, 0): [
- 'inspect.getmoduleinfo',
- 'logging.warn', 'logging.Logger.warn',
- 'logging.LoggerAdapter.warn',
- 'nntplib._NNTPBase.xpath',
- 'platform.popen',
- ],
- (3, 4, 0): [
- 'importlib.find_loader',
- 'plistlib.readPlist', 'plistlib.writePlist',
- 'plistlib.readPlistFromBytes',
- 'plistlib.writePlistToBytes',
- ],
- (3, 4, 4): [
- 'asyncio.tasks.async',
- ],
- (3, 5, 0): [
- 'fractions.gcd',
- 'inspect.getfullargspec', 'inspect.getargvalues',
- 'inspect.formatargspec', 'inspect.formatargvalues',
- 'inspect.getcallargs',
- 'platform.linux_distribution', 'platform.dist',
- ],
- (3, 6, 0): [
- 'importlib._bootstrap_external.FileLoader.load_module',
- ],
- },
- }
-
- @utils.check_messages('bad-open-mode', 'redundant-unittest-assert',
- 'deprecated-method')
- def visit_call(self, node):
- """Visit a CallFunc node."""
- try:
- for inferred in node.func.infer():
- if inferred.root().name == OPEN_MODULE:
- if getattr(node.func, 'name', None) in OPEN_FILES:
- self._check_open_mode(node)
- if inferred.root().name == UNITTEST_CASE:
- self._check_redundant_assert(node, inferred)
- self._check_deprecated_method(node, inferred)
- except astroid.InferenceError:
- return
-
- @utils.check_messages('boolean-datetime')
- def visit_unaryop(self, node):
- if node.op == 'not':
- self._check_datetime(node.operand)
-
- @utils.check_messages('boolean-datetime')
- def visit_if(self, node):
- self._check_datetime(node.test)
-
- @utils.check_messages('boolean-datetime')
- def visit_ifexp(self, node):
- self._check_datetime(node.test)
-
- @utils.check_messages('boolean-datetime')
- def visit_boolop(self, node):
- for value in node.values:
- self._check_datetime(value)
-
- def _check_deprecated_method(self, node, inferred):
- py_vers = sys.version_info[0]
-
- if isinstance(node.func, astroid.Attribute):
- func_name = node.func.attrname
- elif isinstance(node.func, astroid.Name):
- func_name = node.func.name
- else:
- # Not interested in other nodes.
- return
-
- # Reject nodes which aren't of interest to us.
- acceptable_nodes = (astroid.BoundMethod,
- astroid.UnboundMethod,
- astroid.FunctionDef)
- if not isinstance(inferred, acceptable_nodes):
- return
-
- qname = inferred.qname()
- if qname in self.deprecated[0]:
- self.add_message('deprecated-method', node=node,
- args=(func_name, ))
- else:
- for since_vers, func_list in self.deprecated[py_vers].items():
- if since_vers <= sys.version_info and qname in func_list:
- self.add_message('deprecated-method', node=node,
- args=(func_name, ))
- break
-
- def _check_redundant_assert(self, node, infer):
- if (isinstance(infer, astroid.BoundMethod) and
- node.args and isinstance(node.args[0], astroid.Const) and
- infer.name in ['assertTrue', 'assertFalse']):
- self.add_message('redundant-unittest-assert',
- args=(infer.name, node.args[0].value, ),
- node=node)
-
- def _check_datetime(self, node):
- """ Check that a datetime was infered.
- If so, emit boolean-datetime warning.
- """
- try:
- infered = next(node.infer())
- except astroid.InferenceError:
- return
- if (isinstance(infered, Instance) and
- infered.qname() == 'datetime.time'):
- self.add_message('boolean-datetime', node=node)
-
- def _check_open_mode(self, node):
- """Check that the mode argument of an open or file call is valid."""
- try:
- mode_arg = utils.get_argument_from_call(node, position=1,
- keyword='mode')
- except utils.NoSuchArgumentError:
- return
- if mode_arg:
- mode_arg = utils.safe_infer(mode_arg)
- if (isinstance(mode_arg, astroid.Const)
- and not _check_mode_str(mode_arg.value)):
- self.add_message('bad-open-mode', node=node,
- args=mode_arg.value)
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(StdlibChecker(linter))
diff --git a/pymode/libs/pylint/checkers/strings.py b/pymode/libs/pylint/checkers/strings.py
deleted file mode 100644
index 8135b412..00000000
--- a/pymode/libs/pylint/checkers/strings.py
+++ /dev/null
@@ -1,621 +0,0 @@
-# Copyright (c) 2009-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checker for string formatting operations.
-"""
-
-import sys
-import tokenize
-import string
-import numbers
-
-import six
-
-import astroid
-from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
-from pylint.checkers import BaseChecker, BaseTokenChecker
-from pylint.checkers import utils
-from pylint.checkers.utils import check_messages
-
-
-_PY3K = sys.version_info[:2] >= (3, 0)
-_PY27 = sys.version_info[:2] == (2, 7)
-
-MSGS = {
- 'E1300': ("Unsupported format character %r (%#02x) at index %d",
- "bad-format-character",
- "Used when a unsupported format character is used in a format\
- string."),
- 'E1301': ("Format string ends in middle of conversion specifier",
- "truncated-format-string",
- "Used when a format string terminates before the end of a \
- conversion specifier."),
- 'E1302': ("Mixing named and unnamed conversion specifiers in format string",
- "mixed-format-string",
- "Used when a format string contains both named (e.g. '%(foo)d') \
- and unnamed (e.g. '%d') conversion specifiers. This is also \
- used when a named conversion specifier contains * for the \
- minimum field width and/or precision."),
- 'E1303': ("Expected mapping for format string, not %s",
- "format-needs-mapping",
- "Used when a format string that uses named conversion specifiers \
- is used with an argument that is not a mapping."),
- 'W1300': ("Format string dictionary key should be a string, not %s",
- "bad-format-string-key",
- "Used when a format string that uses named conversion specifiers \
- is used with a dictionary whose keys are not all strings."),
- 'W1301': ("Unused key %r in format string dictionary",
- "unused-format-string-key",
- "Used when a format string that uses named conversion specifiers \
- is used with a dictionary that contains keys not required by the \
- format string."),
- 'E1304': ("Missing key %r in format string dictionary",
- "missing-format-string-key",
- "Used when a format string that uses named conversion specifiers \
- is used with a dictionary that doesn't contain all the keys \
- required by the format string."),
- 'E1305': ("Too many arguments for format string",
- "too-many-format-args",
- "Used when a format string that uses unnamed conversion \
- specifiers is given too many arguments."),
- 'E1306': ("Not enough arguments for format string",
- "too-few-format-args",
- "Used when a format string that uses unnamed conversion \
- specifiers is given too few arguments"),
- 'E1310': ("Suspicious argument in %s.%s call",
- "bad-str-strip-call",
- "The argument to a str.{l,r,}strip call contains a"
- " duplicate character, "),
- 'W1302': ("Invalid format string",
- "bad-format-string",
- "Used when a PEP 3101 format string is invalid.",
- {'minversion': (2, 7)}),
- 'W1303': ("Missing keyword argument %r for format string",
- "missing-format-argument-key",
- "Used when a PEP 3101 format string that uses named fields "
- "doesn't receive one or more required keywords.",
- {'minversion': (2, 7)}),
- 'W1304': ("Unused format argument %r",
- "unused-format-string-argument",
- "Used when a PEP 3101 format string that uses named "
- "fields is used with an argument that "
- "is not required by the format string.",
- {'minversion': (2, 7)}),
- 'W1305': ("Format string contains both automatic field numbering "
- "and manual field specification",
- "format-combined-specification",
- "Used when a PEP 3101 format string contains both automatic "
- "field numbering (e.g. '{}') and manual field "
- "specification (e.g. '{0}').",
- {'minversion': (2, 7)}),
- 'W1306': ("Missing format attribute %r in format specifier %r",
- "missing-format-attribute",
- "Used when a PEP 3101 format string uses an "
- "attribute specifier ({0.length}), but the argument "
- "passed for formatting doesn't have that attribute.",
- {'minversion': (2, 7)}),
- 'W1307': ("Using invalid lookup key %r in format specifier %r",
- "invalid-format-index",
- "Used when a PEP 3101 format string uses a lookup specifier "
- "({a[1]}), but the argument passed for formatting "
- "doesn't contain or doesn't have that key as an attribute.",
- {'minversion': (2, 7)})
- }
-
-OTHER_NODES = (astroid.Const, astroid.List, astroid.Repr,
- astroid.Lambda, astroid.FunctionDef,
- astroid.ListComp, astroid.SetComp, astroid.GeneratorExp)
-
-if _PY3K:
- import _string # pylint: disable=wrong-import-position, wrong-import-order
-
- def split_format_field_names(format_string):
- return _string.formatter_field_name_split(format_string)
-else:
- def _field_iterator_convertor(iterator):
- for is_attr, key in iterator:
- if isinstance(key, numbers.Number):
- yield is_attr, int(key)
- else:
- yield is_attr, key
-
- def split_format_field_names(format_string):
- try:
- keyname, fielditerator = format_string._formatter_field_name_split()
- except ValueError:
- raise utils.IncompleteFormatString
- # it will return longs, instead of ints, which will complicate
- # the output
- return keyname, _field_iterator_convertor(fielditerator)
-
-
-def collect_string_fields(format_string):
- """ Given a format string, return an iterator
- of all the valid format fields. It handles nested fields
- as well.
- """
-
- formatter = string.Formatter()
- try:
- parseiterator = formatter.parse(format_string)
- for result in parseiterator:
- if all(item is None for item in result[1:]):
- # not a replacement format
- continue
- name = result[1]
- nested = result[2]
- yield name
- if nested:
- for field in collect_string_fields(nested):
- yield field
- except ValueError as exc:
- # Probably the format string is invalid.
- if exc.args[0].startswith("cannot switch from manual"):
- # On Jython, parsing a string with both manual
- # and automatic positions will fail with a ValueError,
- # while on CPython it will simply return the fields,
- # the validation being done in the interpreter (?).
- # We're just returning two mixed fields in order
- # to trigger the format-combined-specification check.
- yield ""
- yield "1"
- return
- raise utils.IncompleteFormatString(format_string)
-
-def parse_format_method_string(format_string):
- """
- Parses a PEP 3101 format string, returning a tuple of
- (keys, num_args, manual_pos_arg),
- where keys is the set of mapping keys in the format string, num_args
- is the number of arguments required by the format string and
- manual_pos_arg is the number of arguments passed with the position.
- """
- keys = []
- num_args = 0
- manual_pos_arg = set()
- for name in collect_string_fields(format_string):
- if name and str(name).isdigit():
- manual_pos_arg.add(str(name))
- elif name:
- keyname, fielditerator = split_format_field_names(name)
- if isinstance(keyname, numbers.Number):
- # In Python 2 it will return long which will lead
- # to different output between 2 and 3
- manual_pos_arg.add(str(keyname))
- keyname = int(keyname)
- try:
- keys.append((keyname, list(fielditerator)))
- except ValueError:
- raise utils.IncompleteFormatString()
- else:
- num_args += 1
- return keys, num_args, len(manual_pos_arg)
-
-def get_args(callfunc):
- """Get the arguments from the given `CallFunc` node.
-
- Return a tuple, where the first element is the
- number of positional arguments and the second element
- is the keyword arguments in a dict.
- """
- if callfunc.keywords:
- named = {arg.arg: utils.safe_infer(arg.value)
- for arg in callfunc.keywords}
- else:
- named = {}
- positional = len(callfunc.args)
- return positional, named
-
-def get_access_path(key, parts):
- """ Given a list of format specifiers, returns
- the final access path (e.g. a.b.c[0][1]).
- """
- path = []
- for is_attribute, specifier in parts:
- if is_attribute:
- path.append(".{}".format(specifier))
- else:
- path.append("[{!r}]".format(specifier))
- return str(key) + "".join(path)
-
-
-class StringFormatChecker(BaseChecker):
- """Checks string formatting operations to ensure that the format string
- is valid and the arguments match the format string.
- """
-
- __implements__ = (IAstroidChecker,)
- name = 'string'
- msgs = MSGS
-
- @check_messages(*(MSGS.keys()))
- def visit_binop(self, node):
- if node.op != '%':
- return
- left = node.left
- args = node.right
-
- if not (isinstance(left, astroid.Const)
- and isinstance(left.value, six.string_types)):
- return
- format_string = left.value
- try:
- required_keys, required_num_args = \
- utils.parse_format_string(format_string)
- except utils.UnsupportedFormatCharacter as e:
- c = format_string[e.index]
- self.add_message('bad-format-character',
- node=node, args=(c, ord(c), e.index))
- return
- except utils.IncompleteFormatString:
- self.add_message('truncated-format-string', node=node)
- return
- if required_keys and required_num_args:
- # The format string uses both named and unnamed format
- # specifiers.
- self.add_message('mixed-format-string', node=node)
- elif required_keys:
- # The format string uses only named format specifiers.
- # Check that the RHS of the % operator is a mapping object
- # that contains precisely the set of keys required by the
- # format string.
- if isinstance(args, astroid.Dict):
- keys = set()
- unknown_keys = False
- for k, _ in args.items:
- if isinstance(k, astroid.Const):
- key = k.value
- if isinstance(key, six.string_types):
- keys.add(key)
- else:
- self.add_message('bad-format-string-key',
- node=node, args=key)
- else:
- # One of the keys was something other than a
- # constant. Since we can't tell what it is,
- # suppress checks for missing keys in the
- # dictionary.
- unknown_keys = True
- if not unknown_keys:
- for key in required_keys:
- if key not in keys:
- self.add_message('missing-format-string-key',
- node=node, args=key)
- for key in keys:
- if key not in required_keys:
- self.add_message('unused-format-string-key',
- node=node, args=key)
- elif isinstance(args, OTHER_NODES + (astroid.Tuple,)):
- type_name = type(args).__name__
- self.add_message('format-needs-mapping',
- node=node, args=type_name)
- # else:
- # The RHS of the format specifier is a name or
- # expression. It may be a mapping object, so
- # there's nothing we can check.
- else:
- # The format string uses only unnamed format specifiers.
- # Check that the number of arguments passed to the RHS of
- # the % operator matches the number required by the format
- # string.
- if isinstance(args, astroid.Tuple):
- rhs_tuple = utils.safe_infer(args)
- num_args = None
- if rhs_tuple not in (None, astroid.Uninferable):
- num_args = len(rhs_tuple.elts)
- elif isinstance(args, OTHER_NODES + (astroid.Dict, astroid.DictComp)):
- num_args = 1
- else:
- # The RHS of the format specifier is a name or
- # expression. It could be a tuple of unknown size, so
- # there's nothing we can check.
- num_args = None
- if num_args is not None:
- if num_args > required_num_args:
- self.add_message('too-many-format-args', node=node)
- elif num_args < required_num_args:
- self.add_message('too-few-format-args', node=node)
-
-
- @check_messages(*(MSGS.keys()))
- def visit_call(self, node):
- func = utils.safe_infer(node.func)
- if (isinstance(func, astroid.BoundMethod)
- and isinstance(func.bound, astroid.Instance)
- and func.bound.name in ('str', 'unicode', 'bytes')):
- if func.name in ('strip', 'lstrip', 'rstrip') and node.args:
- arg = utils.safe_infer(node.args[0])
- if not isinstance(arg, astroid.Const):
- return
- if len(arg.value) != len(set(arg.value)):
- self.add_message('bad-str-strip-call', node=node,
- args=(func.bound.name, func.name))
- elif func.name == 'format':
- if _PY27 or _PY3K:
- self._check_new_format(node, func)
-
- def _check_new_format(self, node, func):
- """ Check the new string formatting. """
- # TODO: skip (for now) format nodes which don't have
- # an explicit string on the left side of the format operation.
- # We do this because our inference engine can't properly handle
- # redefinitions of the original string.
- # For more details, see issue 287.
- #
- # Note that there may not be any left side at all, if the format method
- # has been assigned to another variable. See issue 351. For example:
- #
- # fmt = 'some string {}'.format
- # fmt('arg')
- if (isinstance(node.func, astroid.Attribute)
- and not isinstance(node.func.expr, astroid.Const)):
- return
- try:
- strnode = next(func.bound.infer())
- except astroid.InferenceError:
- return
- if not isinstance(strnode, astroid.Const):
- return
- if not isinstance(strnode.value, six.string_types):
- return
-
- if node.starargs or node.kwargs:
- return
- try:
- positional, named = get_args(node)
- except astroid.InferenceError:
- return
- try:
- fields, num_args, manual_pos = parse_format_method_string(strnode.value)
- except utils.IncompleteFormatString:
- self.add_message('bad-format-string', node=node)
- return
-
- named_fields = set(field[0] for field in fields
- if isinstance(field[0], six.string_types))
- if num_args and manual_pos:
- self.add_message('format-combined-specification',
- node=node)
- return
-
- check_args = False
- # Consider "{[0]} {[1]}" as num_args.
- num_args += sum(1 for field in named_fields
- if field == '')
- if named_fields:
- for field in named_fields:
- if field not in named and field:
- self.add_message('missing-format-argument-key',
- node=node,
- args=(field, ))
- for field in named:
- if field not in named_fields:
- self.add_message('unused-format-string-argument',
- node=node,
- args=(field, ))
- # num_args can be 0 if manual_pos is not.
- num_args = num_args or manual_pos
- if positional or num_args:
- empty = any(True for field in named_fields
- if field == '')
- if named or empty:
- # Verify the required number of positional arguments
- # only if the .format got at least one keyword argument.
- # This means that the format strings accepts both
- # positional and named fields and we should warn
- # when one of the them is missing or is extra.
- check_args = True
- else:
- check_args = True
- if check_args:
- # num_args can be 0 if manual_pos is not.
- num_args = num_args or manual_pos
- if positional > num_args:
- self.add_message('too-many-format-args', node=node)
- elif positional < num_args:
- self.add_message('too-few-format-args', node=node)
-
- self._check_new_format_specifiers(node, fields, named)
-
- def _check_new_format_specifiers(self, node, fields, named):
- """
- Check attribute and index access in the format
- string ("{0.a}" and "{0[a]}").
- """
- for key, specifiers in fields:
- # Obtain the argument. If it can't be obtained
- # or infered, skip this check.
- if key == '':
- # {[0]} will have an unnamed argument, defaulting
- # to 0. It will not be present in `named`, so use the value
- # 0 for it.
- key = 0
- if isinstance(key, numbers.Number):
- try:
- argname = utils.get_argument_from_call(node, key)
- except utils.NoSuchArgumentError:
- continue
- else:
- if key not in named:
- continue
- argname = named[key]
- if argname in (astroid.YES, None):
- continue
- try:
- argument = next(argname.infer())
- except astroid.InferenceError:
- continue
- if not specifiers or argument is astroid.YES:
- # No need to check this key if it doesn't
- # use attribute / item access
- continue
- if argument.parent and isinstance(argument.parent, astroid.Arguments):
- # Ignore any object coming from an argument,
- # because we can't infer its value properly.
- continue
- previous = argument
- parsed = []
- for is_attribute, specifier in specifiers:
- if previous is astroid.YES:
- break
- parsed.append((is_attribute, specifier))
- if is_attribute:
- try:
- previous = previous.getattr(specifier)[0]
- except astroid.NotFoundError:
- if (hasattr(previous, 'has_dynamic_getattr') and
- previous.has_dynamic_getattr()):
- # Don't warn if the object has a custom __getattr__
- break
- path = get_access_path(key, parsed)
- self.add_message('missing-format-attribute',
- args=(specifier, path),
- node=node)
- break
- else:
- warn_error = False
- if hasattr(previous, 'getitem'):
- try:
- previous = previous.getitem(astroid.Const(specifier))
- except (astroid.AstroidIndexError,
- astroid.AstroidTypeError,
- astroid.AttributeInferenceError):
- warn_error = True
- except astroid.InferenceError:
- break
- if previous is astroid.Uninferable:
- break
- else:
- try:
- # Lookup __getitem__ in the current node,
- # but skip further checks, because we can't
- # retrieve the looked object
- previous.getattr('__getitem__')
- break
- except astroid.NotFoundError:
- warn_error = True
- if warn_error:
- path = get_access_path(key, parsed)
- self.add_message('invalid-format-index',
- args=(specifier, path),
- node=node)
- break
-
- try:
- previous = next(previous.infer())
- except astroid.InferenceError:
- # can't check further if we can't infer it
- break
-
-
-class StringConstantChecker(BaseTokenChecker):
- """Check string literals"""
- __implements__ = (ITokenChecker, IRawChecker)
- name = 'string_constant'
- msgs = {
- 'W1401': ('Anomalous backslash in string: \'%s\'. '
- 'String constant might be missing an r prefix.',
- 'anomalous-backslash-in-string',
- 'Used when a backslash is in a literal string but not as an '
- 'escape.'),
- 'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
- 'String constant might be missing an r or u prefix.',
- 'anomalous-unicode-escape-in-string',
- 'Used when an escape like \\u is encountered in a byte '
- 'string where it has no effect.'),
- }
-
- # Characters that have a special meaning after a backslash in either
- # Unicode or byte strings.
- ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
-
- # TODO(mbp): Octal characters are quite an edge case today; people may
- # prefer a separate warning where they occur. \0 should be allowed.
-
- # Characters that have a special meaning after a backslash but only in
- # Unicode strings.
- UNICODE_ESCAPE_CHARACTERS = 'uUN'
-
- def process_module(self, module):
- self._unicode_literals = 'unicode_literals' in module.future_imports
-
- def process_tokens(self, tokens):
- for (tok_type, token, (start_row, _), _, _) in tokens:
- if tok_type == tokenize.STRING:
- # 'token' is the whole un-parsed token; we can look at the start
- # of it to see whether it's a raw or unicode string etc.
- self.process_string_token(token, start_row)
-
- def process_string_token(self, token, start_row):
- for i, c in enumerate(token):
- if c in '\'\"':
- quote_char = c
- break
- # pylint: disable=undefined-loop-variable
- prefix = token[:i].lower() # markers like u, b, r.
- after_prefix = token[i:]
- if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
- string_body = after_prefix[3:-3]
- else:
- string_body = after_prefix[1:-1] # Chop off quotes
- # No special checks on raw strings at the moment.
- if 'r' not in prefix:
- self.process_non_raw_string_token(prefix, string_body, start_row)
-
- def process_non_raw_string_token(self, prefix, string_body, start_row):
- """check for bad escapes in a non-raw string.
-
- prefix: lowercase string of eg 'ur' string prefix markers.
- string_body: the un-parsed body of the string, not including the quote
- marks.
- start_row: integer line number in the source.
- """
- # Walk through the string; if we see a backslash then escape the next
- # character, and skip over it. If we see a non-escaped character,
- # alert, and continue.
- #
- # Accept a backslash when it escapes a backslash, or a quote, or
- # end-of-line, or one of the letters that introduce a special escape
- # sequence
- #
- # TODO(mbp): Maybe give a separate warning about the rarely-used
- # \a \b \v \f?
- #
- # TODO(mbp): We could give the column of the problem character, but
- # add_message doesn't seem to have a way to pass it through at present.
- i = 0
- while True:
- i = string_body.find('\\', i)
- if i == -1:
- break
- # There must be a next character; having a backslash at the end
- # of the string would be a SyntaxError.
- next_char = string_body[i+1]
- match = string_body[i:i+2]
- if next_char in self.UNICODE_ESCAPE_CHARACTERS:
- if 'u' in prefix:
- pass
- elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
- pass # unicode by default
- else:
- self.add_message('anomalous-unicode-escape-in-string',
- line=start_row, args=(match, ))
- elif next_char not in self.ESCAPE_CHARACTERS:
- self.add_message('anomalous-backslash-in-string',
- line=start_row, args=(match, ))
- # Whether it was a valid escape or not, backslash followed by
- # another character can always be consumed whole: the second
- # character can never be the start of a new backslash escape.
- i += 2
-
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(StringFormatChecker(linter))
- linter.register_checker(StringConstantChecker(linter))
diff --git a/pymode/libs/pylint/checkers/typecheck.py b/pymode/libs/pylint/checkers/typecheck.py
deleted file mode 100644
index 8ae8f446..00000000
--- a/pymode/libs/pylint/checkers/typecheck.py
+++ /dev/null
@@ -1,1289 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014, 2016 Google, Inc.
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2014 Holger Peters
-# Copyright (c) 2014 David Shea
-# Copyright (c) 2015 Radu Ciorba
-# Copyright (c) 2015 Rene Zhang
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2016 Jakub Wilk
-# Copyright (c) 2016 Jürgen Hermann
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""try to find more bugs in the code using astroid inference capabilities
-"""
-
-import collections
-import fnmatch
-import heapq
-import itertools
-import operator
-import re
-import shlex
-import sys
-
-import six
-
-import astroid
-import astroid.context
-import astroid.arguments
-import astroid.nodes
-from astroid import exceptions
-from astroid.interpreter import dunder_lookup
-from astroid import objects
-from astroid import bases
-
-from pylint.interfaces import IAstroidChecker, INFERENCE
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import (
- is_super, check_messages, decorated_with_property,
- decorated_with, node_ignores_exception,
- is_iterable, is_mapping, supports_membership_test,
- is_comprehension, is_inside_abstract_class,
- supports_getitem,
- supports_setitem,
- supports_delitem,
- safe_infer,
- has_known_bases,
- is_builtin_object,
- singledispatch)
-
-
-BUILTINS = six.moves.builtins.__name__
-STR_FORMAT = "%s.str.format" % BUILTINS
-
-
-def _unflatten(iterable):
- for index, elem in enumerate(iterable):
- if (isinstance(elem, collections.Sequence) and
- not isinstance(elem, six.string_types)):
- for single_elem in _unflatten(elem):
- yield single_elem
- elif elem and not index:
- # We're interested only in the first element.
- yield elem
-
-
-def _is_owner_ignored(owner, name, ignored_classes, ignored_modules):
- """Check if the given owner should be ignored
-
- This will verify if the owner's module is in *ignored_modules*
- or the owner's module fully qualified name is in *ignored_modules*
- or if the *ignored_modules* contains a pattern which catches
- the fully qualified name of the module.
-
- Also, similar checks are done for the owner itself, if its name
- matches any name from the *ignored_classes* or if its qualified
- name can be found in *ignored_classes*.
- """
- ignored_modules = set(ignored_modules)
- module_name = owner.root().name
- module_qname = owner.root().qname()
- if any(module_name in ignored_modules or
- module_qname in ignored_modules or
- fnmatch.fnmatch(module_qname, ignore) for ignore in ignored_modules):
- return True
-
- ignored_classes = set(ignored_classes)
- if hasattr(owner, 'qname'):
- qname = owner.qname()
- else:
- qname = ''
- return any(name == ignore or qname == ignore for ignore in ignored_classes)
-
-
-@singledispatch
-def _node_names(node):
- # TODO: maybe we need an ABC for checking if an object is a scoped node
- # or not?
- if not hasattr(node, 'locals'):
- return []
- return node.locals.keys()
-
-
-@_node_names.register(astroid.ClassDef)
-@_node_names.register(astroid.Instance)
-def _(node):
- values = itertools.chain(node.instance_attrs.keys(), node.locals.keys())
-
- try:
- mro = node.mro()[1:]
- except (NotImplementedError, TypeError):
- mro = node.ancestors()
-
- other_values = [value for cls in mro for value in _node_names(cls)]
- return itertools.chain(values, other_values)
-
-
-def _string_distance(seq1, seq2):
- seq2_length = len(seq2)
-
- row = list(range(1, seq2_length + 1)) + [0]
- for seq1_index, seq1_char in enumerate(seq1):
- last_row = row
- row = [0] * seq2_length + [seq1_index + 1]
-
- for seq2_index, seq2_char in enumerate(seq2):
- row[seq2_index] = min(
- last_row[seq2_index] + 1,
- row[seq2_index - 1] + 1,
- last_row[seq2_index - 1] + (seq1_char != seq2_char)
-
- )
-
- return row[seq2_length - 1]
-
-
-def _similar_names(owner, attrname, distance_threshold, max_choices):
- """Given an owner and a name, try to find similar names
-
- The similar names are searched given a distance metric and only
- a given number of choices will be returned.
- """
- possible_names = []
- names = _node_names(owner)
-
- for name in names:
- if name == attrname:
- continue
-
- distance = _string_distance(attrname, name)
- if distance <= distance_threshold:
- possible_names.append((name, distance))
-
- # Now get back the values with a minimum, up to the given
- # limit or choices.
- picked = [name for (name, _) in
- heapq.nsmallest(max_choices, possible_names,
- key=operator.itemgetter(1))]
- return sorted(picked)
-
-
-def _missing_member_hint(owner, attrname, distance_threshold, max_choices):
- names = _similar_names(owner, attrname, distance_threshold, max_choices)
- if not names:
- # No similar name.
- return ""
-
- names = list(map(repr, names))
- if len(names) == 1:
- names = ", ".join(names)
- else:
- names = "one of {} or {}".format(", ".join(names[:-1]), names[-1])
-
- return "; maybe {}?".format(names)
-
-
-MSGS = {
- 'E1101': ('%s %r has no %r member%s',
- 'no-member',
- 'Used when a variable is accessed for an unexistent member.',
- {'old_names': [('E1103', 'maybe-no-member')]}),
- 'E1102': ('%s is not callable',
- 'not-callable',
- 'Used when an object being called has been inferred to a non \
- callable object'),
- 'E1111': ('Assigning to function call which doesn\'t return',
- 'assignment-from-no-return',
- 'Used when an assignment is done on a function call but the \
- inferred function doesn\'t return anything.'),
- 'E1120': ('No value for argument %s in %s call',
- 'no-value-for-parameter',
- 'Used when a function call passes too few arguments.'),
- 'E1121': ('Too many positional arguments for %s call',
- 'too-many-function-args',
- 'Used when a function call passes too many positional \
- arguments.'),
- 'E1123': ('Unexpected keyword argument %r in %s call',
- 'unexpected-keyword-arg',
- 'Used when a function call passes a keyword argument that \
- doesn\'t correspond to one of the function\'s parameter names.'),
- 'E1124': ('Argument %r passed by position and keyword in %s call',
- 'redundant-keyword-arg',
- 'Used when a function call would result in assigning multiple \
- values to a function parameter, one value from a positional \
- argument and one from a keyword argument.'),
- 'E1125': ('Missing mandatory keyword argument %r in %s call',
- 'missing-kwoa',
- ('Used when a function call does not pass a mandatory'
- ' keyword-only argument.'),
- {'minversion': (3, 0)}),
- 'E1126': ('Sequence index is not an int, slice, or instance with __index__',
- 'invalid-sequence-index',
- 'Used when a sequence type is indexed with an invalid type. '
- 'Valid types are ints, slices, and objects with an __index__ '
- 'method.'),
- 'E1127': ('Slice index is not an int, None, or instance with __index__',
- 'invalid-slice-index',
- 'Used when a slice index is not an integer, None, or an object \
- with an __index__ method.'),
- 'E1128': ('Assigning to function call which only returns None',
- 'assignment-from-none',
- 'Used when an assignment is done on a function call but the '
- 'inferred function returns nothing but None.',
- {'old_names': [('W1111', 'assignment-from-none')]}),
- 'E1129': ("Context manager '%s' doesn't implement __enter__ and __exit__.",
- 'not-context-manager',
- 'Used when an instance in a with statement doesn\'t implement '
- 'the context manager protocol(__enter__/__exit__).'),
- 'E1130': ('%s',
- 'invalid-unary-operand-type',
- 'Emitted when a unary operand is used on an object which does not '
- 'support this type of operation'),
- 'E1131': ('%s',
- 'unsupported-binary-operation',
- 'Emitted when a binary arithmetic operation between two '
- 'operands is not supported.'),
- 'E1132': ('Got multiple values for keyword argument %r in function call',
- 'repeated-keyword',
- 'Emitted when a function call got multiple values for a keyword.'),
- 'E1135': ("Value '%s' doesn't support membership test",
- 'unsupported-membership-test',
- 'Emitted when an instance in membership test expression doesn\'t '
- 'implement membership protocol (__contains__/__iter__/__getitem__)'),
- 'E1136': ("Value '%s' is unsubscriptable",
- 'unsubscriptable-object',
- "Emitted when a subscripted value doesn't support subscription"
- "(i.e. doesn't define __getitem__ method)"),
- 'E1137': ("%r does not support item assignment",
- 'unsupported-assignment-operation',
- "Emitted when an object does not support item assignment "
- "(i.e. doesn't define __setitem__ method)"),
- 'E1138': ("%r does not support item deletion",
- 'unsupported-delete-operation',
- "Emitted when an object does not support item deletion "
- "(i.e. doesn't define __delitem__ method)"),
- 'E1139': ('Invalid metaclass %r used',
- 'invalid-metaclass',
- 'Emitted whenever we can detect that a class is using, '
- 'as a metaclass, something which might be invalid for using as '
- 'a metaclass.'),
- }
-
-# builtin sequence types in Python 2 and 3.
-SEQUENCE_TYPES = set(['str', 'unicode', 'list', 'tuple', 'bytearray',
- 'xrange', 'range', 'bytes', 'memoryview'])
-
-
-def _emit_no_member(node, owner, owner_name, ignored_mixins):
- """Try to see if no-member should be emitted for the given owner.
-
- The following cases are ignored:
-
- * the owner is a function and it has decorators.
- * the owner is an instance and it has __getattr__, __getattribute__ implemented
- * the module is explicitly ignored from no-member checks
- * the owner is a class and the name can be found in its metaclass.
- * The access node is protected by an except handler, which handles
- AttributeError, Exception or bare except.
- """
- if node_ignores_exception(node, AttributeError):
- return False
- # skip None anyway
- if isinstance(owner, astroid.Const) and owner.value is None:
- return False
- if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
- return False
- if ignored_mixins and owner_name[-5:].lower() == 'mixin':
- return False
- if isinstance(owner, astroid.FunctionDef) and owner.decorators:
- return False
- if isinstance(owner, (astroid.Instance, astroid.ClassDef)):
- if owner.has_dynamic_getattr() or not has_known_bases(owner):
- return False
- if isinstance(owner, objects.Super):
- # Verify if we are dealing with an invalid Super object.
- # If it is invalid, then there's no point in checking that
- # it has the required attribute. Also, don't fail if the
- # MRO is invalid.
- try:
- owner.super_mro()
- except (exceptions.MroError, exceptions.SuperError):
- return False
- if not all(map(has_known_bases, owner.type.mro())):
- return False
- return True
-
-
-def _determine_callable(callable_obj):
- # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
- # and Function inherits Lambda.
- if isinstance(callable_obj, astroid.BoundMethod):
- # Bound methods have an extra implicit 'self' argument.
- return callable_obj, 1, callable_obj.type
- elif isinstance(callable_obj, astroid.UnboundMethod):
- return callable_obj, 0, 'unbound method'
- elif isinstance(callable_obj, astroid.FunctionDef):
- return callable_obj, 0, callable_obj.type
- elif isinstance(callable_obj, astroid.Lambda):
- return callable_obj, 0, 'lambda'
- elif isinstance(callable_obj, astroid.ClassDef):
- # Class instantiation, lookup __new__ instead.
- # If we only find object.__new__, we can safely check __init__
- # instead. If __new__ belongs to builtins, then we look
- # again for __init__ in the locals, since we won't have
- # argument information for the builtin __new__ function.
- try:
- # Use the last definition of __new__.
- new = callable_obj.local_attr('__new__')[-1]
- except exceptions.NotFoundError:
- new = None
-
- from_object = new and new.parent.scope().name == 'object'
- from_builtins = new and new.root().name in sys.builtin_module_names
-
- if not new or from_object or from_builtins:
- try:
- # Use the last definition of __init__.
- callable_obj = callable_obj.local_attr('__init__')[-1]
- except exceptions.NotFoundError:
- # do nothing, covered by no-init.
- raise ValueError
- else:
- callable_obj = new
-
- if not isinstance(callable_obj, astroid.FunctionDef):
- raise ValueError
- # both have an extra implicit 'cls'/'self' argument.
- return callable_obj, 1, 'constructor'
- else:
- raise ValueError
-
-
-def _has_parent_of_type(node, node_type, statement):
- """Check if the given node has a parent of the given type."""
- parent = node.parent
- while not isinstance(parent, node_type) and statement.parent_of(parent):
- parent = parent.parent
- return isinstance(parent, node_type)
-
-
-def _is_name_used_as_variadic(name, variadics):
- """Check if the given name is used as a variadic argument."""
- return any(variadic.value == name or variadic.value.parent_of(name)
- for variadic in variadics)
-
-
-def _no_context_variadic_keywords(node):
- statement = node.statement()
- scope = node.scope()
- variadics = ()
-
- if not isinstance(scope, astroid.FunctionDef):
- return False
-
- if isinstance(statement, astroid.Expr) and isinstance(statement.value, astroid.Call):
- call = statement.value
- variadics = call.keywords or ()
-
- return _no_context_variadic(node, scope.args.kwarg, astroid.Keyword, variadics)
-
-
-def _no_context_variadic_positional(node):
- statement = node.statement()
- scope = node.scope()
- variadics = ()
-
- if not isinstance(scope, astroid.FunctionDef):
- return False
-
- if isinstance(statement, astroid.Expr) and isinstance(statement.value, astroid.Call):
- call = statement.value
- variadics = call.starargs
-
- return _no_context_variadic(node, scope.args.vararg, astroid.Starred, variadics)
-
-
-def _no_context_variadic(node, variadic_name, variadic_type, variadics):
- """Verify if the given call node has variadic nodes without context
-
- This is a workaround for handling cases of nested call functions
- which don't have the specific call context at hand.
- Variadic arguments (variable positional arguments and variable
- keyword arguments) are inferred, inherently wrong, by astroid
- as a Tuple, respectively a Dict with empty elements.
- This can lead pylint to believe that a function call receives
- too few arguments.
- """
- statement = node.statement()
- for name in statement.nodes_of_class(astroid.Name):
- if name.name != variadic_name:
- continue
-
- inferred = safe_infer(name)
- if isinstance(inferred, (astroid.List, astroid.Tuple)):
- length = len(inferred.elts)
- elif isinstance(inferred, astroid.Dict):
- length = len(inferred.items)
- else:
- continue
-
- inferred_statement = inferred.statement()
- if not length and isinstance(inferred_statement, astroid.FunctionDef):
- is_in_starred_context = _has_parent_of_type(node, variadic_type, statement)
- used_as_starred_argument = _is_name_used_as_variadic(name, variadics)
- if is_in_starred_context or used_as_starred_argument:
- return True
- return False
-
-
-def _is_invalid_metaclass(metaclass):
- try:
- mro = metaclass.mro()
- except NotImplementedError:
- # Cannot have a metaclass which is not a newstyle class.
- return True
- else:
- if not any(is_builtin_object(cls) and cls.name == 'type'
- for cls in mro):
- return True
- return False
-
-
-def _infer_from_metaclass_constructor(cls, func):
- """Try to infer what the given *func* constructor is building
-
- :param astroid.FunctionDef func:
- A metaclass constructor. Metaclass definitions can be
- functions, which should accept three arguments, the name of
- the class, the bases of the class and the attributes.
- The function could return anything, but usually it should
- be a proper metaclass.
- :param astroid.ClassDef cls:
- The class for which the *func* parameter should generate
- a metaclass.
- :returns:
- The class generated by the function or None,
- if we couldn't infer it.
- :rtype: astroid.ClassDef
- """
- context = astroid.context.InferenceContext()
-
- class_bases = astroid.List()
- class_bases.postinit(elts=cls.bases)
-
- attrs = astroid.Dict()
- local_names = [(name, values[-1]) for name, values in cls.locals.items()]
- attrs.postinit(local_names)
-
- builder_args = astroid.Tuple()
- builder_args.postinit([cls.name, class_bases, attrs])
-
- context.callcontext = astroid.context.CallContext(builder_args)
- try:
- inferred = next(func.infer_call_result(func, context), None)
- except astroid.InferenceError:
- return None
- return inferred or None
-
-
-class TypeChecker(BaseChecker):
- """try to find bugs in the code using type inference
- """
-
- __implements__ = (IAstroidChecker,)
-
- # configuration section name
- name = 'typecheck'
- # messages
- msgs = MSGS
- priority = -1
- # configuration options
- options = (('ignore-on-opaque-inference',
- {'default': True, 'type': 'yn', 'metavar': '',
- 'help': 'This flag controls whether pylint should warn about '
- 'no-member and similar checks whenever an opaque object '
- 'is returned when inferring. The inference can return '
- 'multiple potential results while evaluating a Python object, '
- 'but some branches might not be evaluated, which results in '
- 'partial inference. In that case, it might be useful to still emit '
- 'no-member and other checks for the rest of the inferred objects.'}
- ),
- ('ignore-mixin-members',
- {'default' : True, 'type' : 'yn', 'metavar': '',
- 'help' : 'Tells whether missing members accessed in mixin \
-class should be ignored. A mixin class is detected if its name ends with \
-"mixin" (case insensitive).'}
- ),
- ('ignored-modules',
- {'default': (),
- 'type': 'csv',
- 'metavar': '',
- 'help': 'List of module names for which member attributes '
- 'should not be checked (useful for modules/projects '
- 'where namespaces are manipulated during runtime and '
- 'thus existing member attributes cannot be '
- 'deduced by static analysis. It supports qualified '
- 'module names, as well as Unix pattern matching.'}
- ),
- # the defaults here are *stdlib* names that (almost) always
- # lead to false positives, since their idiomatic use is
- # 'too dynamic' for pylint to grok.
- ('ignored-classes',
- {'default' : ('optparse.Values', 'thread._local', '_thread._local'),
- 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of class names for which member attributes '
- 'should not be checked (useful for classes with '
- 'dynamically set attributes). This supports '
- 'the use of qualified names.'}
- ),
-
- ('generated-members',
- {'default' : (),
- 'type' : 'string',
- 'metavar' : '',
- 'help' : 'List of members which are set dynamically and \
-missed by pylint inference system, and so shouldn\'t trigger E1101 when \
-accessed. Python regular expressions are accepted.'}
- ),
- ('contextmanager-decorators',
- {'default': ['contextlib.contextmanager'],
- 'type': 'csv',
- 'metavar': '',
- 'help': 'List of decorators that produce context managers, '
- 'such as contextlib.contextmanager. Add to this list '
- 'to register other decorators that produce valid '
- 'context managers.'}
- ),
- ('missing-member-hint-distance',
- {'default': 1,
- 'type': 'int',
- 'metavar': '',
- 'help': 'The minimum edit distance a name should have in order '
- 'to be considered a similar match for a missing member name.'
- }
- ),
- ('missing-member-max-choices',
- {'default': 1,
- 'type': "int",
- 'metavar': '',
- 'help': 'The total number of similar names that should be taken in '
- 'consideration when showing a hint for a missing member.'
- }
- ),
- ('missing-member-hint',
- {'default': True,
- 'type': "yn",
- 'metavar': '',
- 'help': 'Show a hint with possible names when a member name was not '
- 'found. The aspect of finding the hint is based on edit distance.'
- }
- ),
- )
-
- def open(self):
- # do this in open since config not fully initialized in __init__
- # generated_members may contain regular expressions
- # (surrounded by quote `"` and followed by a comma `,`)
- # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
- # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
- if isinstance(self.config.generated_members, six.string_types):
- gen = shlex.shlex(self.config.generated_members)
- gen.whitespace += ','
- gen.wordchars += r'[]-+\.*?()|'
- self.config.generated_members = tuple(tok.strip('"') for tok in gen)
-
- @check_messages('invalid-metaclass')
- def visit_classdef(self, node):
-
- def _metaclass_name(metaclass):
- if isinstance(metaclass, (astroid.ClassDef, astroid.FunctionDef)):
- return metaclass.name
- return metaclass.as_string()
-
- metaclass = node.declared_metaclass()
- if not metaclass:
- return
-
- if isinstance(metaclass, astroid.FunctionDef):
- # Try to infer the result.
- metaclass = _infer_from_metaclass_constructor(node, metaclass)
- if not metaclass:
- # Don't do anything if we cannot infer the result.
- return
-
- if isinstance(metaclass, astroid.ClassDef):
- if _is_invalid_metaclass(metaclass):
- self.add_message('invalid-metaclass', node=node,
- args=(_metaclass_name(metaclass), ))
- else:
- self.add_message('invalid-metaclass', node=node,
- args=(_metaclass_name(metaclass), ))
-
- def visit_assignattr(self, node):
- if isinstance(node.assign_type(), astroid.AugAssign):
- self.visit_attribute(node)
-
- def visit_delattr(self, node):
- self.visit_attribute(node)
-
- @check_messages('no-member')
- def visit_attribute(self, node):
- """check that the accessed attribute exists
-
- to avoid too much false positives for now, we'll consider the code as
- correct if a single of the inferred nodes has the accessed attribute.
-
- function/method, super call and metaclasses are ignored
- """
- for pattern in self.config.generated_members:
- # attribute is marked as generated, stop here
- if re.match(pattern, node.attrname):
- return
- if re.match(pattern, node.as_string()):
- return
-
- try:
- inferred = list(node.expr.infer())
- except exceptions.InferenceError:
- return
-
- # list of (node, nodename) which are missing the attribute
- missingattr = set()
-
- non_opaque_inference_results = [
- owner for owner in inferred
- if owner is not astroid.Uninferable
- and not isinstance(owner, astroid.nodes.Unknown)
- ]
- if (len(non_opaque_inference_results) != len(inferred)
- and self.config.ignore_on_opaque_inference):
- # There is an ambiguity in the inference. Since we can't
- # make sure that we won't emit a false positive, we just stop
- # whenever the inference returns an opaque inference object.
- return
-
- for owner in non_opaque_inference_results:
- name = getattr(owner, 'name', None)
- if _is_owner_ignored(owner, name, self.config.ignored_classes,
- self.config.ignored_modules):
- continue
-
- try:
- if not [n for n in owner.getattr(node.attrname)
- if not isinstance(n.statement(), astroid.AugAssign)]:
- missingattr.add((owner, name))
- continue
- except AttributeError:
- # XXX method / function
- continue
- except exceptions.NotFoundError:
- # This can't be moved before the actual .getattr call,
- # because there can be more values inferred and we are
- # stopping after the first one which has the attribute in question.
- # The problem is that if the first one has the attribute,
- # but we continue to the next values which doesn't have the
- # attribute, then we'll have a false positive.
- # So call this only after the call has been made.
- if not _emit_no_member(node, owner, name,
- self.config.ignore_mixin_members):
- continue
- missingattr.add((owner, name))
- continue
- # stop on the first found
- break
- else:
- # we have not found any node with the attributes, display the
- # message for infered nodes
- done = set()
- for owner, name in missingattr:
- if isinstance(owner, astroid.Instance):
- actual = owner._proxied
- else:
- actual = owner
- if actual in done:
- continue
- done.add(actual)
-
- if self.config.missing_member_hint:
- hint = _missing_member_hint(owner, node.attrname,
- self.config.missing_member_hint_distance,
- self.config.missing_member_max_choices)
- else:
- hint = ""
-
- self.add_message('no-member', node=node,
- args=(owner.display_type(), name,
- node.attrname, hint),
- confidence=INFERENCE)
-
- @check_messages('assignment-from-no-return', 'assignment-from-none')
- def visit_assign(self, node):
- """check that if assigning to a function call, the function is
- possibly returning something valuable
- """
- if not isinstance(node.value, astroid.Call):
- return
- function_node = safe_infer(node.value.func)
- # skip class, generator and incomplete function definition
- if not (isinstance(function_node, astroid.FunctionDef) and
- function_node.root().fully_defined()):
- return
- if function_node.is_generator() \
- or function_node.is_abstract(pass_is_abstract=False):
- return
- returns = list(function_node.nodes_of_class(astroid.Return,
- skip_klass=astroid.FunctionDef))
- if not returns:
- self.add_message('assignment-from-no-return', node=node)
- else:
- for rnode in returns:
- if not (isinstance(rnode.value, astroid.Const)
- and rnode.value.value is None
- or rnode.value is None):
- break
- else:
- self.add_message('assignment-from-none', node=node)
-
- def _check_uninferable_callfunc(self, node):
- """
- Check that the given uninferable CallFunc node does not
- call an actual function.
- """
- if not isinstance(node.func, astroid.Attribute):
- return
-
- # Look for properties. First, obtain
- # the lhs of the Getattr node and search the attribute
- # there. If that attribute is a property or a subclass of properties,
- # then most likely it's not callable.
-
- # TODO: since astroid doesn't understand descriptors very well
- # we will not handle them here, right now.
-
- expr = node.func.expr
- klass = safe_infer(expr)
- if (klass is None or klass is astroid.YES or
- not isinstance(klass, astroid.Instance)):
- return
-
- try:
- attrs = klass._proxied.getattr(node.func.attrname)
- except exceptions.NotFoundError:
- return
-
- for attr in attrs:
- if attr is astroid.YES:
- continue
- if not isinstance(attr, astroid.FunctionDef):
- continue
-
- # Decorated, see if it is decorated with a property.
- # Also, check the returns and see if they are callable.
- if decorated_with_property(attr):
- if all(return_node.callable()
- for return_node in attr.infer_call_result(node)):
- continue
- else:
- self.add_message('not-callable', node=node,
- args=node.func.as_string())
- break
-
- @check_messages(*(list(MSGS.keys())))
- def visit_call(self, node):
- """check that called functions/methods are inferred to callable objects,
- and that the arguments passed to the function match the parameters in
- the inferred function's definition
- """
- # Build the set of keyword arguments, checking for duplicate keywords,
- # and count the positional arguments.
- call_site = astroid.arguments.CallSite.from_call(node)
- num_positional_args = len(call_site.positional_arguments)
- keyword_args = list(call_site.keyword_arguments.keys())
-
- # Determine if we don't have a context for our call and we use variadics.
- if isinstance(node.scope(), astroid.FunctionDef):
- has_no_context_positional_variadic = _no_context_variadic_positional(node)
- has_no_context_keywords_variadic = _no_context_variadic_keywords(node)
- else:
- has_no_context_positional_variadic = has_no_context_keywords_variadic = False
-
- called = safe_infer(node.func)
- # only function, generator and object defining __call__ are allowed
- if called and not called.callable():
- if isinstance(called, astroid.Instance) and not has_known_bases(called):
- # Don't emit if we can't make sure this object is callable.
- pass
- else:
- self.add_message('not-callable', node=node,
- args=node.func.as_string())
-
- self._check_uninferable_callfunc(node)
-
- try:
- called, implicit_args, callable_name = _determine_callable(called)
- except ValueError:
- # Any error occurred during determining the function type, most of
- # those errors are handled by different warnings.
- return
-
- num_positional_args += implicit_args
- if called.args.args is None:
- # Built-in functions have no argument information.
- return
-
- if len(called.argnames()) != len(set(called.argnames())):
- # Duplicate parameter name (see duplicate-argument). We can't really
- # make sense of the function call in this case, so just return.
- return
-
- # Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
- for keyword in call_site.duplicated_keywords:
- self.add_message('repeated-keyword',
- node=node, args=(keyword, ))
-
- if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
- # Can't make sense of this.
- return
-
- # Analyze the list of formal parameters.
- num_mandatory_parameters = len(called.args.args) - len(called.args.defaults)
- parameters = []
- parameter_name_to_index = {}
- for i, arg in enumerate(called.args.args):
- if isinstance(arg, astroid.Tuple):
- name = None
- # Don't store any parameter names within the tuple, since those
- # are not assignable from keyword arguments.
- else:
- assert isinstance(arg, astroid.AssignName)
- # This occurs with:
- # def f( (a), (b) ): pass
- name = arg.name
- parameter_name_to_index[name] = i
- if i >= num_mandatory_parameters:
- defval = called.args.defaults[i - num_mandatory_parameters]
- else:
- defval = None
- parameters.append([(name, defval), False])
-
- kwparams = {}
- for i, arg in enumerate(called.args.kwonlyargs):
- if isinstance(arg, astroid.Keyword):
- name = arg.arg
- else:
- assert isinstance(arg, astroid.AssignName)
- name = arg.name
- kwparams[name] = [called.args.kw_defaults[i], False]
-
- # Match the supplied arguments against the function parameters.
-
- # 1. Match the positional arguments.
- for i in range(num_positional_args):
- if i < len(parameters):
- parameters[i][1] = True
- elif called.args.vararg is not None:
- # The remaining positional arguments get assigned to the *args
- # parameter.
- break
- else:
- # Too many positional arguments.
- self.add_message('too-many-function-args',
- node=node, args=(callable_name,))
- break
-
- # 2. Match the keyword arguments.
- for keyword in keyword_args:
- if keyword in parameter_name_to_index:
- i = parameter_name_to_index[keyword]
- if parameters[i][1]:
- # Duplicate definition of function parameter.
-
- # Might be too hardcoded, but this can actually
- # happen when using str.format and `self` is passed
- # by keyword argument, as in `.format(self=self)`.
- # It's perfectly valid to so, so we're just skipping
- # it if that's the case.
- if not (keyword == 'self' and called.qname() == STR_FORMAT):
- self.add_message('redundant-keyword-arg',
- node=node, args=(keyword, callable_name))
- else:
- parameters[i][1] = True
- elif keyword in kwparams:
- if kwparams[keyword][1]: # XXX is that even possible?
- # Duplicate definition of function parameter.
- self.add_message('redundant-keyword-arg', node=node,
- args=(keyword, callable_name))
- else:
- kwparams[keyword][1] = True
- elif called.args.kwarg is not None:
- # The keyword argument gets assigned to the **kwargs parameter.
- pass
- else:
- # Unexpected keyword argument.
- self.add_message('unexpected-keyword-arg', node=node,
- args=(keyword, callable_name))
-
- # 3. Match the **kwargs, if any.
- if node.kwargs:
- for i, [(name, defval), assigned] in enumerate(parameters):
- # Assume that *kwargs provides values for all remaining
- # unassigned named parameters.
- if name is not None:
- parameters[i][1] = True
- else:
- # **kwargs can't assign to tuples.
- pass
-
- # Check that any parameters without a default have been assigned
- # values.
- for [(name, defval), assigned] in parameters:
- if (defval is None) and not assigned:
- if name is None:
- display_name = ''
- else:
- display_name = repr(name)
- # TODO(cpopa): this should be removed after PyCQA/astroid/issues/177
- if not has_no_context_positional_variadic:
- self.add_message('no-value-for-parameter', node=node,
- args=(display_name, callable_name))
-
- for name in kwparams:
- defval, assigned = kwparams[name]
- if defval is None and not assigned and not has_no_context_keywords_variadic:
- self.add_message('missing-kwoa', node=node,
- args=(name, callable_name))
-
- @check_messages('invalid-sequence-index')
- def visit_extslice(self, node):
- # Check extended slice objects as if they were used as a sequence
- # index to check if the object being sliced can support them
- return self.visit_index(node)
-
- @check_messages('invalid-sequence-index')
- def visit_index(self, node):
- if not node.parent or not hasattr(node.parent, "value"):
- return
- # Look for index operations where the parent is a sequence type.
- # If the types can be determined, only allow indices to be int,
- # slice or instances with __index__.
- parent_type = safe_infer(node.parent.value)
- if not isinstance(parent_type, (astroid.ClassDef, astroid.Instance)):
- return
- if not has_known_bases(parent_type):
- return
-
- # Determine what method on the parent this index will use
- # The parent of this node will be a Subscript, and the parent of that
- # node determines if the Subscript is a get, set, or delete operation.
- if node.parent.ctx is astroid.Store:
- methodname = '__setitem__'
- elif node.parent.ctx is astroid.Del:
- methodname = '__delitem__'
- else:
- methodname = '__getitem__'
-
- # Check if this instance's __getitem__, __setitem__, or __delitem__, as
- # appropriate to the statement, is implemented in a builtin sequence
- # type. This way we catch subclasses of sequence types but skip classes
- # that override __getitem__ and which may allow non-integer indices.
- try:
- methods = dunder_lookup.lookup(parent_type, methodname)
- if methods is astroid.YES:
- return
- itemmethod = methods[0]
- except (exceptions.NotFoundError,
- exceptions.AttributeInferenceError,
- IndexError):
- return
- if not isinstance(itemmethod, astroid.FunctionDef):
- return
- if itemmethod.root().name != BUILTINS:
- return
- if not itemmethod.parent:
- return
- if itemmethod.parent.name not in SEQUENCE_TYPES:
- return
-
- # For ExtSlice objects coming from visit_extslice, no further
- # inference is necessary, since if we got this far the ExtSlice
- # is an error.
- if isinstance(node, astroid.ExtSlice):
- index_type = node
- else:
- index_type = safe_infer(node)
- if index_type is None or index_type is astroid.YES:
- return
- # Constants must be of type int
- if isinstance(index_type, astroid.Const):
- if isinstance(index_type.value, int):
- return
- # Instance values must be int, slice, or have an __index__ method
- elif isinstance(index_type, astroid.Instance):
- if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'):
- return
- try:
- index_type.getattr('__index__')
- return
- except exceptions.NotFoundError:
- pass
- elif isinstance(index_type, astroid.Slice):
- # Delegate to visit_slice. A slice can be present
- # here after inferring the index node, which could
- # be a `slice(...)` call for instance.
- return self.visit_slice(index_type)
-
- # Anything else is an error
- self.add_message('invalid-sequence-index', node=node)
-
- @check_messages('invalid-slice-index')
- def visit_slice(self, node):
- # Check the type of each part of the slice
- for index in (node.lower, node.upper, node.step):
- if index is None:
- continue
-
- index_type = safe_infer(index)
- if index_type is None or index_type is astroid.YES:
- continue
-
- # Constants must of type int or None
- if isinstance(index_type, astroid.Const):
- if isinstance(index_type.value, (int, type(None))):
- continue
- # Instance values must be of type int, None or an object
- # with __index__
- elif isinstance(index_type, astroid.Instance):
- if index_type.pytype() in (BUILTINS + '.int',
- BUILTINS + '.NoneType'):
- continue
-
- try:
- index_type.getattr('__index__')
- return
- except exceptions.NotFoundError:
- pass
-
- # Anything else is an error
- self.add_message('invalid-slice-index', node=node)
-
- @check_messages('not-context-manager')
- def visit_with(self, node):
- for ctx_mgr, _ in node.items:
- context = astroid.context.InferenceContext()
- infered = safe_infer(ctx_mgr, context=context)
- if infered is None or infered is astroid.YES:
- continue
-
- if isinstance(infered, bases.Generator):
- # Check if we are dealing with a function decorated
- # with contextlib.contextmanager.
- if decorated_with(infered.parent,
- self.config.contextmanager_decorators):
- continue
- # If the parent of the generator is not the context manager itself,
- # that means that it could have been returned from another
- # function which was the real context manager.
- # The following approach is more of a hack rather than a real
- # solution: walk all the inferred statements for the
- # given *ctx_mgr* and if you find one function scope
- # which is decorated, consider it to be the real
- # manager and give up, otherwise emit not-context-manager.
- # See the test file for not_context_manager for a couple
- # of self explaining tests.
- for path in six.moves.filter(None, _unflatten(context.path)):
- scope = path.scope()
- if not isinstance(scope, astroid.FunctionDef):
- continue
- if decorated_with(scope,
- self.config.contextmanager_decorators):
- break
- else:
- self.add_message('not-context-manager',
- node=node, args=(infered.name, ))
- else:
- try:
- infered.getattr('__enter__')
- infered.getattr('__exit__')
- except exceptions.NotFoundError:
- if isinstance(infered, astroid.Instance):
- # If we do not know the bases of this class,
- # just skip it.
- if not has_known_bases(infered):
- continue
- # Just ignore mixin classes.
- if self.config.ignore_mixin_members:
- if infered.name[-5:].lower() == 'mixin':
- continue
-
- self.add_message('not-context-manager',
- node=node, args=(infered.name, ))
-
- @check_messages('invalid-unary-operand-type')
- def visit_unaryop(self, node):
- """Detect TypeErrors for unary operands."""
-
- for error in node.type_errors():
- # Let the error customize its output.
- self.add_message('invalid-unary-operand-type',
- args=str(error), node=node)
-
- @check_messages('unsupported-binary-operation')
- def _visit_binop(self, node):
- """Detect TypeErrors for binary arithmetic operands."""
- self._check_binop_errors(node)
-
- @check_messages('unsupported-binary-operation')
- def _visit_augassign(self, node):
- """Detect TypeErrors for augmented binary arithmetic operands."""
- self._check_binop_errors(node)
-
- def _check_binop_errors(self, node):
- for error in node.type_errors():
- # Let the error customize its output.
- if any(isinstance(obj, astroid.ClassDef) and not has_known_bases(obj)
- for obj in (error.left_type, error.right_type)):
- continue
- self.add_message('unsupported-binary-operation',
- args=str(error), node=node)
-
- def _check_membership_test(self, node):
- if is_inside_abstract_class(node):
- return
- if is_comprehension(node):
- return
- infered = safe_infer(node)
- if infered is None or infered is astroid.YES:
- return
- if not supports_membership_test(infered):
- self.add_message('unsupported-membership-test',
- args=node.as_string(),
- node=node)
-
- @check_messages('unsupported-membership-test')
- def visit_compare(self, node):
- if len(node.ops) != 1:
- return
-
- op, right = node.ops[0]
- if op in ['in', 'not in']:
- self._check_membership_test(right)
-
- @check_messages('unsubscriptable-object', 'unsupported-assignment-operation',
- 'unsupported-delete-operation')
- def visit_subscript(self, node):
- supported_protocol = None
- if isinstance(node.value, (astroid.ListComp, astroid.DictComp)):
- return
-
- if node.ctx == astroid.Load:
- supported_protocol = supports_getitem
- msg = 'unsubscriptable-object'
- elif node.ctx == astroid.Store:
- supported_protocol = supports_setitem
- msg = 'unsupported-assignment-operation'
- elif node.ctx == astroid.Del:
- supported_protocol = supports_delitem
- msg = 'unsupported-delete-operation'
-
- if isinstance(node.value, astroid.SetComp):
- self.add_message(msg, args=node.value.as_string(),
- node=node.value)
- return
-
- if is_inside_abstract_class(node):
- return
-
- inferred = safe_infer(node.value)
- if inferred is None or inferred is astroid.YES:
- return
-
- if not supported_protocol(inferred):
- self.add_message(msg, args=node.value.as_string(), node=node.value)
-
-
-class IterableChecker(BaseChecker):
- """
- Checks for non-iterables used in an iterable context.
- Contexts include:
- - for-statement
- - starargs in function call
- - `yield from`-statement
- - list, dict and set comprehensions
- - generator expressions
- Also checks for non-mappings in function call kwargs.
- """
-
- __implements__ = (IAstroidChecker,)
- name = 'iterable_check'
-
- msgs = {'E1133': ('Non-iterable value %s is used in an iterating context',
- 'not-an-iterable',
- 'Used when a non-iterable value is used in place where '
- 'iterable is expected'),
- 'E1134': ('Non-mapping value %s is used in a mapping context',
- 'not-a-mapping',
- 'Used when a non-mapping value is used in place where '
- 'mapping is expected'),
- }
-
- def _check_iterable(self, node):
- if is_inside_abstract_class(node):
- return
- if is_comprehension(node):
- return
- infered = safe_infer(node)
- if infered is None or infered is astroid.YES:
- return
- if not is_iterable(infered):
- self.add_message('not-an-iterable',
- args=node.as_string(),
- node=node)
-
- def _check_mapping(self, node):
- if is_inside_abstract_class(node):
- return
- if isinstance(node, astroid.DictComp):
- return
- infered = safe_infer(node)
- if infered is None or infered is astroid.YES:
- return
- if not is_mapping(infered):
- self.add_message('not-a-mapping',
- args=node.as_string(),
- node=node)
-
- @check_messages('not-an-iterable')
- def visit_for(self, node):
- self._check_iterable(node.iter)
-
- @check_messages('not-an-iterable')
- def visit_yieldfrom(self, node):
- self._check_iterable(node.value)
-
- @check_messages('not-an-iterable', 'not-a-mapping')
- def visit_call(self, node):
- for stararg in node.starargs:
- self._check_iterable(stararg.value)
- for kwarg in node.kwargs:
- self._check_mapping(kwarg.value)
-
- @check_messages('not-an-iterable')
- def visit_listcomp(self, node):
- for gen in node.generators:
- self._check_iterable(gen.iter)
-
- @check_messages('not-an-iterable')
- def visit_dictcomp(self, node):
- for gen in node.generators:
- self._check_iterable(gen.iter)
-
- @check_messages('not-an-iterable')
- def visit_setcomp(self, node):
- for gen in node.generators:
- self._check_iterable(gen.iter)
-
- @check_messages('not-an-iterable')
- def visit_generatorexp(self, node):
- for gen in node.generators:
- self._check_iterable(gen.iter)
-
-
-def register(linter):
- """required method to auto register this checker """
- linter.register_checker(TypeChecker(linter))
- linter.register_checker(IterableChecker(linter))
diff --git a/pymode/libs/pylint/checkers/utils.py b/pymode/libs/pylint/checkers/utils.py
deleted file mode 100644
index 5ed18865..00000000
--- a/pymode/libs/pylint/checkers/utils.py
+++ /dev/null
@@ -1,860 +0,0 @@
-# Copyright (c) 2006-2007, 2009-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2012-2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-# Copyright (c) 2015 Radu Ciorba
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-# pylint: disable=W0611
-"""some functions that may be useful for various checkers
-"""
-import collections
-import functools
-try:
- from functools import singledispatch as singledispatch
-except ImportError:
- # pylint: disable=import-error
- from singledispatch import singledispatch as singledispatch
-try:
- from functools import lru_cache
-except ImportError:
- from backports.functools_lru_cache import lru_cache
-import itertools
-import re
-import sys
-import string
-import warnings
-
-import six
-from six.moves import map, builtins # pylint: disable=redefined-builtin
-
-import astroid
-from astroid import bases as _bases
-from astroid import scoped_nodes
-
-
-BUILTINS_NAME = builtins.__name__
-COMP_NODE_TYPES = (astroid.ListComp, astroid.SetComp,
- astroid.DictComp, astroid.GeneratorExp)
-PY3K = sys.version_info[0] == 3
-
-if not PY3K:
- EXCEPTIONS_MODULE = "exceptions"
-else:
- EXCEPTIONS_MODULE = "builtins"
-ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
- 'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
-ITER_METHOD = '__iter__'
-NEXT_METHOD = 'next' if six.PY2 else '__next__'
-GETITEM_METHOD = '__getitem__'
-SETITEM_METHOD = '__setitem__'
-DELITEM_METHOD = '__delitem__'
-CONTAINS_METHOD = '__contains__'
-KEYS_METHOD = 'keys'
-
-# Dictionary which maps the number of expected parameters a
-# special method can have to a set of special methods.
-# The following keys are used to denote the parameters restrictions:
-#
-# * None: variable number of parameters
-# * number: exactly that number of parameters
-# * tuple: this are the odd ones. Basically it means that the function
-# can work with any number of arguments from that tuple,
-# although it's best to implement it in order to accept
-# all of them.
-_SPECIAL_METHODS_PARAMS = {
- None: ('__new__', '__init__', '__call__'),
-
- 0: ('__del__', '__repr__', '__str__', '__bytes__', '__hash__', '__bool__',
- '__dir__', '__len__', '__length_hint__', '__iter__', '__reversed__',
- '__neg__', '__pos__', '__abs__', '__invert__', '__complex__', '__int__',
- '__float__', '__neg__', '__pos__', '__abs__', '__complex__', '__int__',
- '__float__', '__index__', '__enter__', '__aenter__', '__getnewargs_ex__',
- '__getnewargs__', '__getstate__', '__reduce__', '__copy__',
- '__unicode__', '__nonzero__', '__await__', '__aiter__', '__anext__',
- '__fspath__'),
-
- 1: ('__format__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__',
- '__ge__', '__getattr__', '__getattribute__', '__delattr__',
- '__delete__', '__instancecheck__', '__subclasscheck__',
- '__getitem__', '__missing__', '__delitem__', '__contains__',
- '__add__', '__sub__', '__mul__', '__truediv__', '__floordiv__',
- '__mod__', '__divmod__', '__lshift__', '__rshift__', '__and__',
- '__xor__', '__or__', '__radd__', '__rsub__', '__rmul__', '__rtruediv__',
- '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__',
- '__rand__', '__rxor__', '__ror__', '__iadd__', '__isub__', '__imul__',
- '__itruediv__', '__ifloordiv__', '__imod__', '__ilshift__',
- '__irshift__', '__iand__', '__ixor__', '__ior__', '__ipow__',
- '__setstate__', '__reduce_ex__', '__deepcopy__', '__cmp__',
- '__matmul__', '__rmatmul__', '__div__'),
-
- 2: ('__setattr__', '__get__', '__set__', '__setitem__'),
-
- 3: ('__exit__', '__aexit__'),
-
- (0, 1): ('__round__', ),
-}
-
-SPECIAL_METHODS_PARAMS = {
- name: params
- for params, methods in _SPECIAL_METHODS_PARAMS.items()
- for name in methods
-}
-PYMETHODS = set(SPECIAL_METHODS_PARAMS)
-
-
-class NoSuchArgumentError(Exception):
- pass
-
-def is_inside_except(node):
- """Returns true if node is inside the name of an except handler."""
- current = node
- while current and not isinstance(current.parent, astroid.ExceptHandler):
- current = current.parent
-
- return current and current is current.parent.name
-
-
-def get_all_elements(node):
- """Recursively returns all atoms in nested lists and tuples."""
- if isinstance(node, (astroid.Tuple, astroid.List)):
- for child in node.elts:
- for e in get_all_elements(child):
- yield e
- else:
- yield node
-
-
-def clobber_in_except(node):
- """Checks if an assignment node in an except handler clobbers an existing
- variable.
-
- Returns (True, args for W0623) if assignment clobbers an existing variable,
- (False, None) otherwise.
- """
- if isinstance(node, astroid.AssignAttr):
- return (True, (node.attrname, 'object %r' % (node.expr.as_string(),)))
- elif isinstance(node, astroid.AssignName):
- name = node.name
- if is_builtin(name):
- return (True, (name, 'builtins'))
- else:
- stmts = node.lookup(name)[1]
- if (stmts and not isinstance(stmts[0].assign_type(),
- (astroid.Assign, astroid.AugAssign,
- astroid.ExceptHandler))):
- return (True, (name, 'outer scope (line %s)' % stmts[0].fromlineno))
- return (False, None)
-
-
-def is_super(node):
- """return True if the node is referencing the "super" builtin function
- """
- if getattr(node, 'name', None) == 'super' and \
- node.root().name == BUILTINS_NAME:
- return True
- return False
-
-def is_error(node):
- """return true if the function does nothing but raising an exception"""
- for child_node in node.get_children():
- if isinstance(child_node, astroid.Raise):
- return True
- return False
-
-def is_raising(body):
- """return true if the given statement node raise an exception"""
- for node in body:
- if isinstance(node, astroid.Raise):
- return True
- return False
-
-builtins = builtins.__dict__.copy()
-SPECIAL_BUILTINS = ('__builtins__',) # '__path__', '__file__')
-
-def is_builtin_object(node):
- """Returns True if the given node is an object from the __builtin__ module."""
- return node and node.root().name == BUILTINS_NAME
-
-def is_builtin(name):
- """return true if could be considered as a builtin defined by python
- """
- return name in builtins or name in SPECIAL_BUILTINS
-
-def is_defined_before(var_node):
- """return True if the variable node is defined by a parent node (list,
- set, dict, or generator comprehension, lambda) or in a previous sibling
- node on the same line (statement_defining ; statement_using)
- """
- varname = var_node.name
- _node = var_node.parent
- while _node:
- if isinstance(_node, COMP_NODE_TYPES):
- for ass_node in _node.nodes_of_class(astroid.AssignName):
- if ass_node.name == varname:
- return True
- elif isinstance(_node, astroid.For):
- for ass_node in _node.target.nodes_of_class(astroid.AssignName):
- if ass_node.name == varname:
- return True
- elif isinstance(_node, astroid.With):
- for expr, ids in _node.items:
- if expr.parent_of(var_node):
- break
- if (ids and
- isinstance(ids, astroid.AssignName) and
- ids.name == varname):
- return True
- elif isinstance(_node, (astroid.Lambda, astroid.FunctionDef)):
- if _node.args.is_argument(varname):
- # If the name is found inside a default value
- # of a function, then let the search continue
- # in the parent's tree.
- if _node.args.parent_of(var_node):
- try:
- _node.args.default_value(varname)
- _node = _node.parent
- continue
- except astroid.NoDefault:
- pass
- return True
- if getattr(_node, 'name', None) == varname:
- return True
- break
- elif isinstance(_node, astroid.ExceptHandler):
- if isinstance(_node.name, astroid.AssignName):
- ass_node = _node.name
- if ass_node.name == varname:
- return True
- _node = _node.parent
- # possibly multiple statements on the same line using semi colon separator
- stmt = var_node.statement()
- _node = stmt.previous_sibling()
- lineno = stmt.fromlineno
- while _node and _node.fromlineno == lineno:
- for ass_node in _node.nodes_of_class(astroid.AssignName):
- if ass_node.name == varname:
- return True
- for imp_node in _node.nodes_of_class((astroid.ImportFrom, astroid.Import)):
- if varname in [name[1] or name[0] for name in imp_node.names]:
- return True
- _node = _node.previous_sibling()
- return False
-
-def is_func_default(node):
- """return true if the given Name node is used in function default argument's
- value
- """
- parent = node.scope()
- if isinstance(parent, astroid.FunctionDef):
- for default_node in parent.args.defaults:
- for default_name_node in default_node.nodes_of_class(astroid.Name):
- if default_name_node is node:
- return True
- return False
-
-def is_func_decorator(node):
- """return true if the name is used in function decorator"""
- parent = node.parent
- while parent is not None:
- if isinstance(parent, astroid.Decorators):
- return True
- if (parent.is_statement or
- isinstance(parent, (astroid.Lambda,
- scoped_nodes.ComprehensionScope,
- scoped_nodes.ListComp))):
- break
- parent = parent.parent
- return False
-
-def is_ancestor_name(frame, node):
- """return True if `frame` is a astroid.Class node with `node` in the
- subtree of its bases attribute
- """
- try:
- bases = frame.bases
- except AttributeError:
- return False
- for base in bases:
- if node in base.nodes_of_class(astroid.Name):
- return True
- return False
-
-def assign_parent(node):
- """return the higher parent which is not an AssName, Tuple or List node
- """
- while node and isinstance(node, (astroid.AssignName,
- astroid.Tuple,
- astroid.List)):
- node = node.parent
- return node
-
-
-def overrides_a_method(class_node, name):
- """return True if is a method overridden from an ancestor"""
- for ancestor in class_node.ancestors():
- if name in ancestor and isinstance(ancestor[name], astroid.FunctionDef):
- return True
- return False
-
-def check_messages(*messages):
- """decorator to store messages that are handled by a checker method"""
-
- def store_messages(func):
- func.checks_msgs = messages
- return func
- return store_messages
-
-class IncompleteFormatString(Exception):
- """A format string ended in the middle of a format specifier."""
- pass
-
-class UnsupportedFormatCharacter(Exception):
- """A format character in a format string is not one of the supported
- format characters."""
- def __init__(self, index):
- Exception.__init__(self, index)
- self.index = index
-
-def parse_format_string(format_string):
- """Parses a format string, returning a tuple of (keys, num_args), where keys
- is the set of mapping keys in the format string, and num_args is the number
- of arguments required by the format string. Raises
- IncompleteFormatString or UnsupportedFormatCharacter if a
- parse error occurs."""
- keys = set()
- num_args = 0
- def next_char(i):
- i += 1
- if i == len(format_string):
- raise IncompleteFormatString
- return (i, format_string[i])
- i = 0
- while i < len(format_string):
- char = format_string[i]
- if char == '%':
- i, char = next_char(i)
- # Parse the mapping key (optional).
- key = None
- if char == '(':
- depth = 1
- i, char = next_char(i)
- key_start = i
- while depth != 0:
- if char == '(':
- depth += 1
- elif char == ')':
- depth -= 1
- i, char = next_char(i)
- key_end = i - 1
- key = format_string[key_start:key_end]
-
- # Parse the conversion flags (optional).
- while char in '#0- +':
- i, char = next_char(i)
- # Parse the minimum field width (optional).
- if char == '*':
- num_args += 1
- i, char = next_char(i)
- else:
- while char in string.digits:
- i, char = next_char(i)
- # Parse the precision (optional).
- if char == '.':
- i, char = next_char(i)
- if char == '*':
- num_args += 1
- i, char = next_char(i)
- else:
- while char in string.digits:
- i, char = next_char(i)
- # Parse the length modifier (optional).
- if char in 'hlL':
- i, char = next_char(i)
- # Parse the conversion type (mandatory).
- if PY3K:
- flags = 'diouxXeEfFgGcrs%a'
- else:
- flags = 'diouxXeEfFgGcrs%'
- if char not in flags:
- raise UnsupportedFormatCharacter(i)
- if key:
- keys.add(key)
- elif char != '%':
- num_args += 1
- i += 1
- return keys, num_args
-
-
-def is_attr_protected(attrname):
- """return True if attribute name is protected (start with _ and some other
- details), False otherwise.
- """
- return attrname[0] == '_' and attrname != '_' and not (
- attrname.startswith('__') and attrname.endswith('__'))
-
-def node_frame_class(node):
- """return klass node for a method node (or a staticmethod or a
- classmethod), return null otherwise
- """
- klass = node.frame()
-
- while klass is not None and not isinstance(klass, astroid.ClassDef):
- if klass.parent is None:
- klass = None
- else:
- klass = klass.parent.frame()
-
- return klass
-
-
-def is_attr_private(attrname):
- """Check that attribute name is private (at least two leading underscores,
- at most one trailing underscore)
- """
- regex = re.compile('^_{2,}.*[^_]+_?$')
- return regex.match(attrname)
-
-def get_argument_from_call(callfunc_node, position=None, keyword=None):
- """Returns the specified argument from a function call.
-
- :param astroid.Call callfunc_node: Node representing a function call to check.
- :param int position: position of the argument.
- :param str keyword: the keyword of the argument.
-
- :returns: The node representing the argument, None if the argument is not found.
- :rtype: astroid.Name
- :raises ValueError: if both position and keyword are None.
- :raises NoSuchArgumentError: if no argument at the provided position or with
- the provided keyword.
- """
- if position is None and keyword is None:
- raise ValueError('Must specify at least one of: position or keyword.')
- if position is not None:
- try:
- return callfunc_node.args[position]
- except IndexError:
- pass
- if keyword and callfunc_node.keywords:
- for arg in callfunc_node.keywords:
- if arg.arg == keyword:
- return arg.value
-
- raise NoSuchArgumentError
-
-def inherit_from_std_ex(node):
- """
- Return true if the given class node is subclass of
- exceptions.Exception.
- """
- if node.name in ('Exception', 'BaseException') \
- and node.root().name == EXCEPTIONS_MODULE:
- return True
- return any(inherit_from_std_ex(parent)
- for parent in node.ancestors(recurs=True))
-
-def error_of_type(handler, error_type):
- """
- Check if the given exception handler catches
- the given error_type.
-
- The *handler* parameter is a node, representing an ExceptHandler node.
- The *error_type* can be an exception, such as AttributeError,
- the name of an exception, or it can be a tuple of errors.
- The function will return True if the handler catches any of the
- given errors.
- """
- def stringify_error(error):
- if not isinstance(error, six.string_types):
- return error.__name__
- return error
-
- if not isinstance(error_type, tuple):
- error_type = (error_type, )
- expected_errors = {stringify_error(error) for error in error_type}
- if not handler.type:
- # bare except. While this indeed catches anything, if the desired errors
- # aren't specified directly, then we just ignore it.
- return False
- return handler.catch(expected_errors)
-
-
-def decorated_with_property(node):
- """ Detect if the given function node is decorated with a property. """
- if not node.decorators:
- return False
- for decorator in node.decorators.nodes:
- if not isinstance(decorator, astroid.Name):
- continue
- try:
- if _is_property_decorator(decorator):
- return True
- except astroid.InferenceError:
- pass
- return False
-
-
-def _is_property_decorator(decorator):
- for infered in decorator.infer():
- if isinstance(infered, astroid.ClassDef):
- if infered.root().name == BUILTINS_NAME and infered.name == 'property':
- return True
- for ancestor in infered.ancestors():
- if ancestor.name == 'property' and ancestor.root().name == BUILTINS_NAME:
- return True
-
-
-def decorated_with(func, qnames):
- """Determine if the `func` node has a decorator with the qualified name `qname`."""
- decorators = func.decorators.nodes if func.decorators else []
- for decorator_node in decorators:
- try:
- if any(i is not None and i.qname() in qnames for i in decorator_node.infer()):
- return True
- except astroid.InferenceError:
- continue
- return False
-
-
-@lru_cache(maxsize=1024)
-def unimplemented_abstract_methods(node, is_abstract_cb=None):
- """
- Get the unimplemented abstract methods for the given *node*.
-
- A method can be considered abstract if the callback *is_abstract_cb*
- returns a ``True`` value. The check defaults to verifying that
- a method is decorated with abstract methods.
- The function will work only for new-style classes. For old-style
- classes, it will simply return an empty dictionary.
- For the rest of them, it will return a dictionary of abstract method
- names and their inferred objects.
- """
- if is_abstract_cb is None:
- is_abstract_cb = functools.partial(
- decorated_with, qnames=ABC_METHODS)
- visited = {}
- try:
- mro = reversed(node.mro())
- except NotImplementedError:
- # Old style class, it will not have a mro.
- return {}
- except astroid.ResolveError:
- # Probably inconsistent hierarchy, don'try
- # to figure this out here.
- return {}
- for ancestor in mro:
- for obj in ancestor.values():
- infered = obj
- if isinstance(obj, astroid.AssignName):
- infered = safe_infer(obj)
- if not infered:
- # Might be an abstract function,
- # but since we don't have enough information
- # in order to take this decision, we're taking
- # the *safe* decision instead.
- if obj.name in visited:
- del visited[obj.name]
- continue
- if not isinstance(infered, astroid.FunctionDef):
- if obj.name in visited:
- del visited[obj.name]
- if isinstance(infered, astroid.FunctionDef):
- # It's critical to use the original name,
- # since after inferring, an object can be something
- # else than expected, as in the case of the
- # following assignment.
- #
- # class A:
- # def keys(self): pass
- # __iter__ = keys
- abstract = is_abstract_cb(infered)
- if abstract:
- visited[obj.name] = infered
- elif not abstract and obj.name in visited:
- del visited[obj.name]
- return visited
-
-
-def _import_node_context(node):
- current = node
- ignores = (astroid.ExceptHandler, astroid.TryExcept)
- while current and not isinstance(current.parent, ignores):
- current = current.parent
-
- if current and isinstance(current.parent, ignores):
- return current.parent
- return None
-
-
-def is_from_fallback_block(node):
- """Check if the given node is from a fallback import block."""
- context = _import_node_context(node)
- if not context:
- return False
-
- if isinstance(context, astroid.ExceptHandler):
- other_body = context.parent.body
- handlers = context.parent.handlers
- else:
- other_body = itertools.chain.from_iterable(
- handler.body for handler in context.handlers)
- handlers = context.handlers
-
- has_fallback_imports = any(isinstance(import_node, (astroid.ImportFrom, astroid.Import))
- for import_node in other_body)
- ignores_import_error = _except_handlers_ignores_exception(handlers, ImportError)
- return ignores_import_error or has_fallback_imports
-
-
-def _except_handlers_ignores_exception(handlers, exception):
- func = functools.partial(error_of_type,
- error_type=(exception, ))
- return any(map(func, handlers))
-
-
-def node_ignores_exception(node, exception):
- """Check if the node is in a TryExcept which handles the given exception."""
- current = node
- ignores = (astroid.ExceptHandler, astroid.TryExcept)
- while current and not isinstance(current.parent, ignores):
- current = current.parent
-
- if current and isinstance(current.parent, astroid.TryExcept):
- return _except_handlers_ignores_exception(current.parent.handlers, exception)
- return False
-
-
-def class_is_abstract(node):
- """return true if the given class node should be considered as an abstract
- class
- """
- for method in node.methods():
- if method.parent.frame() is node:
- if method.is_abstract(pass_is_abstract=False):
- return True
- return False
-
-
-def _supports_protocol_method(value, attr):
- try:
- attributes = value.getattr(attr)
- except astroid.NotFoundError:
- return False
-
- first = attributes[0]
- if isinstance(first, astroid.AssignName):
- if isinstance(first.parent.value, astroid.Const):
- return False
- return True
-
-
-def is_comprehension(node):
- comprehensions = (astroid.ListComp,
- astroid.SetComp,
- astroid.DictComp,
- astroid.GeneratorExp)
- return isinstance(node, comprehensions)
-
-
-def _supports_mapping_protocol(value):
- return (
- _supports_protocol_method(value, GETITEM_METHOD)
- and _supports_protocol_method(value, KEYS_METHOD)
- )
-
-
-def _supports_membership_test_protocol(value):
- return _supports_protocol_method(value, CONTAINS_METHOD)
-
-
-def _supports_iteration_protocol(value):
- return (
- _supports_protocol_method(value, ITER_METHOD)
- or _supports_protocol_method(value, GETITEM_METHOD)
- )
-
-
-def _supports_getitem_protocol(value):
- return _supports_protocol_method(value, GETITEM_METHOD)
-
-
-def _supports_setitem_protocol(value):
- return _supports_protocol_method(value, SETITEM_METHOD)
-
-
-def _supports_delitem_protocol(value):
- return _supports_protocol_method(value, DELITEM_METHOD)
-
-
-def _is_abstract_class_name(name):
- lname = name.lower()
- is_mixin = lname.endswith('mixin')
- is_abstract = lname.startswith('abstract')
- is_base = lname.startswith('base') or lname.endswith('base')
- return is_mixin or is_abstract or is_base
-
-
-def is_inside_abstract_class(node):
- while node is not None:
- if isinstance(node, astroid.ClassDef):
- if class_is_abstract(node):
- return True
- name = getattr(node, 'name', None)
- if name is not None and _is_abstract_class_name(name):
- return True
- node = node.parent
- return False
-
-
-def _supports_protocol(value, protocol_callback):
- if isinstance(value, astroid.ClassDef):
- if not has_known_bases(value):
- return True
- # classobj can only be iterable if it has an iterable metaclass
- meta = value.metaclass()
- if meta is not None:
- if protocol_callback(meta):
- return True
- if isinstance(value, astroid.BaseInstance):
- if not has_known_bases(value):
- return True
- if protocol_callback(value):
- return True
-
- # TODO: this is not needed in astroid 2.0, where we can
- # check the type using a virtual base class instead.
- if (isinstance(value, _bases.Proxy)
- and isinstance(value._proxied, astroid.BaseInstance)
- and has_known_bases(value._proxied)):
- value = value._proxied
- return protocol_callback(value)
-
- return False
-
-
-def is_iterable(value):
- return _supports_protocol(value, _supports_iteration_protocol)
-
-
-def is_mapping(value):
- return _supports_protocol(value, _supports_mapping_protocol)
-
-
-def supports_membership_test(value):
- supported = _supports_protocol(value, _supports_membership_test_protocol)
- return supported or is_iterable(value)
-
-
-def supports_getitem(value):
- return _supports_protocol(value, _supports_getitem_protocol)
-
-
-def supports_setitem(value):
- return _supports_protocol(value, _supports_setitem_protocol)
-
-
-def supports_delitem(value):
- return _supports_protocol(value, _supports_delitem_protocol)
-
-
-# TODO(cpopa): deprecate these or leave them as aliases?
-@lru_cache(maxsize=1024)
-def safe_infer(node, context=None):
- """Return the inferred value for the given node.
-
- Return None if inference failed or if there is some ambiguity (more than
- one node has been inferred).
- """
- try:
- inferit = node.infer(context=context)
- value = next(inferit)
- except astroid.InferenceError:
- return
- try:
- next(inferit)
- return # None if there is ambiguity on the inferred node
- except astroid.InferenceError:
- return # there is some kind of ambiguity
- except StopIteration:
- return value
-
-
-def has_known_bases(klass, context=None):
- """Return true if all base classes of a class could be inferred."""
- try:
- return klass._all_bases_known
- except AttributeError:
- pass
- for base in klass.bases:
- result = safe_infer(base, context=context)
- # TODO: check for A->B->A->B pattern in class structure too?
- if (not isinstance(result, astroid.ClassDef) or
- result is klass or
- not has_known_bases(result, context=context)):
- klass._all_bases_known = False
- return False
- klass._all_bases_known = True
- return True
-
-
-def is_none(node):
- return (node is None or
- (isinstance(node, astroid.Const) and node.value is None) or
- (isinstance(node, astroid.Name) and node.name == 'None')
- )
-
-
-def node_type(node):
- """Return the inferred type for `node`
-
- If there is more than one possible type, or if inferred type is YES or None,
- return None
- """
- # check there is only one possible type for the assign node. Else we
- # don't handle it for now
- types = set()
- try:
- for var_type in node.infer():
- if var_type == astroid.YES or is_none(var_type):
- continue
- types.add(var_type)
- if len(types) > 1:
- return
- except astroid.InferenceError:
- return
- return types.pop() if types else None
-
-
-def is_registered_in_singledispatch_function(node):
- """Check if the given function node is a singledispatch function."""
-
- singledispatch_qnames = (
- 'functools.singledispatch',
- 'singledispatch.singledispatch'
- )
-
- if not isinstance(node, astroid.FunctionDef):
- return False
-
- decorators = node.decorators.nodes if node.decorators else []
- for decorator in decorators:
- # func.register are function calls
- if not isinstance(decorator, astroid.Call):
- continue
-
- func = decorator.func
- if not isinstance(func, astroid.Attribute) or func.attrname != 'register':
- continue
-
- try:
- func_def = next(func.expr.infer())
- except astroid.InferenceError:
- continue
-
- if isinstance(func_def, astroid.FunctionDef):
- return decorated_with(func_def, singledispatch_qnames)
-
- return False
diff --git a/pymode/libs/pylint/checkers/variables.py b/pymode/libs/pylint/checkers/variables.py
deleted file mode 100644
index e6becff2..00000000
--- a/pymode/libs/pylint/checkers/variables.py
+++ /dev/null
@@ -1,1324 +0,0 @@
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2011-2014 Google, Inc.
-# Copyright (c) 2013-2016 Claudiu Popa
-# Copyright (c) 2014 Michal Nowikowski
-# Copyright (c) 2015 Radu Ciorba
-# Copyright (c) 2015 Dmitry Pribysh
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""variables checkers for Python code
-"""
-import copy
-import itertools
-import os
-import sys
-import re
-try:
- from functools import lru_cache
-except ImportError:
- from backports.functools_lru_cache import lru_cache
-
-import six
-
-import astroid
-from astroid import decorators
-from astroid import modutils
-from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
-from pylint.utils import get_global_option
-from pylint.checkers import BaseChecker
-from pylint.checkers import utils
-
-
-SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
-FUTURE = '__future__'
-# regexp for ignored argument name
-IGNORED_ARGUMENT_NAMES = re.compile('_.*|^ignored_|^unused_')
-PY3K = sys.version_info >= (3, 0)
-
-
-def _is_from_future_import(stmt, name):
- """Check if the name is a future import from another module."""
- try:
- module = stmt.do_import_module(stmt.modname)
- except astroid.AstroidBuildingException:
- return
-
- for local_node in module.locals.get(name, []):
- if (isinstance(local_node, astroid.ImportFrom)
- and local_node.modname == FUTURE):
- return True
-
-
-def in_for_else_branch(parent, stmt):
- """Returns True if stmt in inside the else branch for a parent For stmt."""
- return (isinstance(parent, astroid.For) and
- any(else_stmt.parent_of(stmt) or else_stmt == stmt
- for else_stmt in parent.orelse))
-
-
-@lru_cache(maxsize=1000)
-def overridden_method(klass, name):
- """get overridden method if any"""
- try:
- parent = next(klass.local_attr_ancestors(name))
- except (StopIteration, KeyError):
- return None
- try:
- meth_node = parent[name]
- except KeyError:
- # We have found an ancestor defining but it's not in the local
- # dictionary. This may happen with astroid built from living objects.
- return None
- if isinstance(meth_node, astroid.FunctionDef):
- return meth_node
- return None
-
-def _get_unpacking_extra_info(node, infered):
- """return extra information to add to the message for unpacking-non-sequence
- and unbalanced-tuple-unpacking errors
- """
- more = ''
- infered_module = infered.root().name
- if node.root().name == infered_module:
- if node.lineno == infered.lineno:
- more = ' %s' % infered.as_string()
- elif infered.lineno:
- more = ' defined at line %s' % infered.lineno
- elif infered.lineno:
- more = ' defined at line %s of %s' % (infered.lineno, infered_module)
- return more
-
-def _detect_global_scope(node, frame, defframe):
- """ Detect that the given frames shares a global
- scope.
-
- Two frames shares a global scope when neither
- of them are hidden under a function scope, as well
- as any of parent scope of them, until the root scope.
- In this case, depending from something defined later on
- will not work, because it is still undefined.
-
- Example:
- class A:
- # B has the same global scope as `C`, leading to a NameError.
- class B(C): ...
- class C: ...
-
- """
- def_scope = scope = None
- if frame and frame.parent:
- scope = frame.parent.scope()
- if defframe and defframe.parent:
- def_scope = defframe.parent.scope()
- if isinstance(frame, astroid.FunctionDef):
- # If the parent of the current node is a
- # function, then it can be under its scope
- # (defined in, which doesn't concern us) or
- # the `->` part of annotations. The same goes
- # for annotations of function arguments, they'll have
- # their parent the Arguments node.
- if not isinstance(node.parent,
- (astroid.FunctionDef, astroid.Arguments)):
- return False
- elif any(not isinstance(f, (astroid.ClassDef, astroid.Module))
- for f in (frame, defframe)):
- # Not interested in other frames, since they are already
- # not in a global scope.
- return False
-
- break_scopes = []
- for s in (scope, def_scope):
- # Look for parent scopes. If there is anything different
- # than a module or a class scope, then they frames don't
- # share a global scope.
- parent_scope = s
- while parent_scope:
- if not isinstance(parent_scope, (astroid.ClassDef, astroid.Module)):
- break_scopes.append(parent_scope)
- break
- if parent_scope.parent:
- parent_scope = parent_scope.parent.scope()
- else:
- break
- if break_scopes and len(set(break_scopes)) != 1:
- # Store different scopes than expected.
- # If the stored scopes are, in fact, the very same, then it means
- # that the two frames (frame and defframe) shares the same scope,
- # and we could apply our lineno analysis over them.
- # For instance, this works when they are inside a function, the node
- # that uses a definition and the definition itself.
- return False
- # At this point, we are certain that frame and defframe shares a scope
- # and the definition of the first depends on the second.
- return frame.lineno < defframe.lineno
-
-def _fix_dot_imports(not_consumed):
- """ Try to fix imports with multiple dots, by returning a dictionary
- with the import names expanded. The function unflattens root imports,
- like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
- and 'xml.sax' respectively.
- """
- # TODO: this should be improved in issue astroid #46
- names = {}
- for name, stmts in six.iteritems(not_consumed):
- if any(isinstance(stmt, astroid.AssignName)
- and isinstance(stmt.assign_type(), astroid.AugAssign)
- for stmt in stmts):
- continue
- for stmt in stmts:
- if not isinstance(stmt, (astroid.ImportFrom, astroid.Import)):
- continue
- for imports in stmt.names:
- second_name = None
- if imports[0] == "*":
- # In case of wildcard imports,
- # pick the name from inside the imported module.
- second_name = name
- else:
- if imports[0].find(".") > -1 or name in imports:
- # Most likely something like 'xml.etree',
- # which will appear in the .locals as 'xml'.
- # Only pick the name if it wasn't consumed.
- second_name = imports[0]
- if second_name and second_name not in names:
- names[second_name] = stmt
- return sorted(names.items(), key=lambda a: a[1].fromlineno)
-
-def _find_frame_imports(name, frame):
- """
- Detect imports in the frame, with the required
- *name*. Such imports can be considered assignments.
- Returns True if an import for the given name was found.
- """
- imports = frame.nodes_of_class((astroid.Import, astroid.ImportFrom))
- for import_node in imports:
- for import_name, import_alias in import_node.names:
- # If the import uses an alias, check only that.
- # Otherwise, check only the import name.
- if import_alias:
- if import_alias == name:
- return True
- elif import_name and import_name == name:
- return True
-
-
-def _import_name_is_global(stmt, global_names):
- for import_name, import_alias in stmt.names:
- # If the import uses an alias, check only that.
- # Otherwise, check only the import name.
- if import_alias:
- if import_alias in global_names:
- return True
- elif import_name in global_names:
- return True
- return False
-
-
-def _flattened_scope_names(iterator):
- values = (set(stmt.names) for stmt in iterator)
- return set(itertools.chain.from_iterable(values))
-
-
-def _assigned_locally(name_node):
- """
- Checks if name_node has corresponding assign statement in same scope
- """
- assign_stmts = name_node.scope().nodes_of_class(astroid.AssignName)
- return any(a.name == name_node.name for a in assign_stmts)
-
-
-MSGS = {
- 'E0601': ('Using variable %r before assignment',
- 'used-before-assignment',
- 'Used when a local variable is accessed before it\'s \
- assignment.'),
- 'E0602': ('Undefined variable %r',
- 'undefined-variable',
- 'Used when an undefined variable is accessed.'),
- 'E0603': ('Undefined variable name %r in __all__',
- 'undefined-all-variable',
- 'Used when an undefined variable name is referenced in __all__.'),
- 'E0604': ('Invalid object %r in __all__, must contain only strings',
- 'invalid-all-object',
- 'Used when an invalid (non-string) object occurs in __all__.'),
- 'E0611': ('No name %r in module %r',
- 'no-name-in-module',
- 'Used when a name cannot be found in a module.'),
-
- 'W0601': ('Global variable %r undefined at the module level',
- 'global-variable-undefined',
- 'Used when a variable is defined through the "global" statement \
- but the variable is not defined in the module scope.'),
- 'W0602': ('Using global for %r but no assignment is done',
- 'global-variable-not-assigned',
- 'Used when a variable is defined through the "global" statement \
- but no assignment to this variable is done.'),
- 'W0603': ('Using the global statement', # W0121
- 'global-statement',
- 'Used when you use the "global" statement to update a global \
- variable. Pylint just try to discourage this \
- usage. That doesn\'t mean you cannot use it !'),
- 'W0604': ('Using the global statement at the module level', # W0103
- 'global-at-module-level',
- 'Used when you use the "global" statement at the module level \
- since it has no effect'),
- 'W0611': ('Unused %s',
- 'unused-import',
- 'Used when an imported module or variable is not used.'),
- 'W0612': ('Unused variable %r',
- 'unused-variable',
- 'Used when a variable is defined but not used.'),
- 'W0613': ('Unused argument %r',
- 'unused-argument',
- 'Used when a function or method argument is not used.'),
- 'W0614': ('Unused import %s from wildcard import',
- 'unused-wildcard-import',
- 'Used when an imported module or variable is not used from a \
- `\'from X import *\'` style import.'),
-
- 'W0621': ('Redefining name %r from outer scope (line %s)',
- 'redefined-outer-name',
- 'Used when a variable\'s name hide a name defined in the outer \
- scope.'),
- 'W0622': ('Redefining built-in %r',
- 'redefined-builtin',
- 'Used when a variable or function override a built-in.'),
- 'W0623': ('Redefining name %r from %s in exception handler',
- 'redefine-in-handler',
- 'Used when an exception handler assigns the exception \
- to an existing name'),
-
- 'W0631': ('Using possibly undefined loop variable %r',
- 'undefined-loop-variable',
- 'Used when an loop variable (i.e. defined by a for loop or \
- a list comprehension or a generator expression) is used outside \
- the loop.'),
-
- 'E0632': ('Possible unbalanced tuple unpacking with '
- 'sequence%s: '
- 'left side has %d label(s), right side has %d value(s)',
- 'unbalanced-tuple-unpacking',
- 'Used when there is an unbalanced tuple unpacking in assignment',
- {'old_names': [('W0632', 'unbalanced-tuple-unpacking')]}),
-
- 'E0633': ('Attempting to unpack a non-sequence%s',
- 'unpacking-non-sequence',
- 'Used when something which is not '
- 'a sequence is used in an unpack assignment',
- {'old_names': [('W0633', 'unpacking-non-sequence')]}),
-
- 'W0640': ('Cell variable %s defined in loop',
- 'cell-var-from-loop',
- 'A variable used in a closure is defined in a loop. '
- 'This will result in all closures using the same value for '
- 'the closed-over variable.'),
-
- }
-
-class VariablesChecker(BaseChecker):
- """checks for
- * unused variables / imports
- * undefined variables
- * redefinition of variable from builtins or from an outer scope
- * use of variable before assignment
- * __all__ consistency
- """
-
- __implements__ = IAstroidChecker
-
- name = 'variables'
- msgs = MSGS
- priority = -1
- options = (("init-import",
- {'default': 0, 'type' : 'yn', 'metavar' : '',
- 'help' : 'Tells whether we should check for unused import in '
- '__init__ files.'}),
- ("dummy-variables-rgx",
- {'default': '_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_',
- 'type' :'regexp', 'metavar' : '',
- 'help' : 'A regular expression matching the name of dummy '
- 'variables (i.e. expectedly not used).'}),
- ("additional-builtins",
- {'default': (), 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of additional names supposed to be defined in '
- 'builtins. Remember that you should avoid to define new builtins '
- 'when possible.'
- }),
- ("callbacks",
- {'default' : ('cb_', '_cb'), 'type' : 'csv',
- 'metavar' : '',
- 'help' : 'List of strings which can identify a callback '
- 'function by name. A callback name must start or '
- 'end with one of those strings.'}
- ),
- ("redefining-builtins-modules",
- {'default': ('six.moves', 'future.builtins'), 'type': 'csv',
- 'metavar': '',
- 'help': 'List of qualified module names which can have objects '
- 'that can redefine builtins.'}
- ),
- ('ignored-argument-names',
- {'default' : IGNORED_ARGUMENT_NAMES,
- 'type' :'regexp', 'metavar' : '',
- 'help' : 'Argument names that match this expression will be '
- 'ignored. Default to name with leading underscore'}
- ),
- ('allow-global-unused-variables',
- {'default': True,
- 'type': 'yn', 'metavar': '',
- 'help': 'Tells whether unused global variables should be treated as a violation.'}
- ),
- )
-
- def __init__(self, linter=None):
- BaseChecker.__init__(self, linter)
- self._to_consume = None # list of tuples: (to_consume:dict, consumed:dict, scope_type:str)
- self._checking_mod_attr = None
- self._loop_variables = []
-
- # Relying on other checker's options, which might not have been initialized yet.
- @decorators.cachedproperty
- def _analyse_fallback_blocks(self):
- return get_global_option(self, 'analyse-fallback-blocks', default=False)
-
- @decorators.cachedproperty
- def _ignored_modules(self):
- return get_global_option(self, 'ignored-modules', default=[])
-
- @decorators.cachedproperty
- def _allow_global_unused_variables(self):
- return get_global_option(self, 'allow-global-unused-variables', default=True)
-
- @utils.check_messages('redefined-outer-name')
- def visit_for(self, node):
- assigned_to = [var.name for var in node.target.nodes_of_class(astroid.AssignName)]
-
- # Only check variables that are used
- dummy_rgx = self.config.dummy_variables_rgx
- assigned_to = [var for var in assigned_to if not dummy_rgx.match(var)]
-
- for variable in assigned_to:
- for outer_for, outer_variables in self._loop_variables:
- if (variable in outer_variables
- and not in_for_else_branch(outer_for, node)):
- self.add_message(
- 'redefined-outer-name',
- args=(variable, outer_for.fromlineno),
- node=node
- )
- break
-
- self._loop_variables.append((node, assigned_to))
-
- @utils.check_messages('redefined-outer-name')
- def leave_for(self, _):
- self._loop_variables.pop()
-
- def visit_module(self, node):
- """visit module : update consumption analysis variable
- checks globals doesn't overrides builtins
- """
- self._to_consume = [(copy.copy(node.locals), {}, 'module')]
- for name, stmts in six.iteritems(node.locals):
- if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]):
- if self._should_ignore_redefined_builtin(stmts[0]):
- continue
- self.add_message('redefined-builtin', args=name, node=stmts[0])
-
- @utils.check_messages('unused-import', 'unused-wildcard-import',
- 'redefined-builtin', 'undefined-all-variable',
- 'invalid-all-object', 'unused-variable')
- def leave_module(self, node):
- """leave module: check globals
- """
- assert len(self._to_consume) == 1
- not_consumed = self._to_consume.pop()[0]
- # attempt to check for __all__ if defined
- if '__all__' in node.locals:
- self._check_all(node, not_consumed)
-
- # check for unused globals
- self._check_globals(not_consumed)
-
- # don't check unused imports in __init__ files
- if not self.config.init_import and node.package:
- return
-
- self._check_imports(not_consumed)
-
- def _check_all(self, node, not_consumed):
- assigned = next(node.igetattr('__all__'))
- if assigned is astroid.YES:
- return
-
- for elt in getattr(assigned, 'elts', ()):
- try:
- elt_name = next(elt.infer())
- except astroid.InferenceError:
- continue
- if elt_name is astroid.YES:
- continue
- if not elt_name.parent:
- continue
-
- if (not isinstance(elt_name, astroid.Const)
- or not isinstance(elt_name.value, six.string_types)):
- self.add_message('invalid-all-object',
- args=elt.as_string(), node=elt)
- continue
-
- elt_name = elt_name.value
- # If elt is in not_consumed, remove it from not_consumed
- if elt_name in not_consumed:
- del not_consumed[elt_name]
- continue
-
- if elt_name not in node.locals:
- if not node.package:
- self.add_message('undefined-all-variable',
- args=(elt_name, ),
- node=elt)
- else:
- basename = os.path.splitext(node.file)[0]
- if os.path.basename(basename) == '__init__':
- name = node.name + "." + elt_name
- try:
- modutils.file_from_modpath(name.split("."))
- except ImportError:
- self.add_message('undefined-all-variable',
- args=(elt_name, ),
- node=elt)
- except SyntaxError:
- # don't yield an syntax-error warning,
- # because it will be later yielded
- # when the file will be checked
- pass
-
- def _check_globals(self, not_consumed):
- if self._allow_global_unused_variables:
- return
- for name, nodes in six.iteritems(not_consumed):
- for node in nodes:
- self.add_message('unused-variable', args=(name,), node=node)
-
- def _check_imports(self, not_consumed):
- local_names = _fix_dot_imports(not_consumed)
- checked = set()
- for name, stmt in local_names:
- for imports in stmt.names:
- real_name = imported_name = imports[0]
- if imported_name == "*":
- real_name = name
- as_name = imports[1]
- if real_name in checked:
- continue
- if name not in (real_name, as_name):
- continue
- checked.add(real_name)
-
- if (isinstance(stmt, astroid.Import) or
- (isinstance(stmt, astroid.ImportFrom) and
- not stmt.modname)):
- if (isinstance(stmt, astroid.ImportFrom) and
- SPECIAL_OBJ.search(imported_name)):
- # Filter special objects (__doc__, __all__) etc.,
- # because they can be imported for exporting.
- continue
- if as_name == "_":
- continue
- if as_name is None:
- msg = "import %s" % imported_name
- else:
- msg = "%s imported as %s" % (imported_name, as_name)
- self.add_message('unused-import', args=msg, node=stmt)
- elif (isinstance(stmt, astroid.ImportFrom)
- and stmt.modname != FUTURE):
-
- if SPECIAL_OBJ.search(imported_name):
- # Filter special objects (__doc__, __all__) etc.,
- # because they can be imported for exporting.
- continue
-
- if _is_from_future_import(stmt, name):
- # Check if the name is in fact loaded from a
- # __future__ import in another module.
- continue
-
- if imported_name == '*':
- self.add_message('unused-wildcard-import',
- args=name, node=stmt)
- else:
- if as_name is None:
- msg = "%s imported from %s" % (imported_name, stmt.modname)
- else:
- fields = (imported_name, stmt.modname, as_name)
- msg = "%s imported from %s as %s" % fields
- self.add_message('unused-import', args=msg, node=stmt)
- del self._to_consume
-
- def visit_classdef(self, node):
- """visit class: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'class'))
-
- def leave_classdef(self, _):
- """leave class: update consumption analysis variable
- """
- # do not check for not used locals here (no sense)
- self._to_consume.pop()
-
- def visit_lambda(self, node):
- """visit lambda: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'lambda'))
-
- def leave_lambda(self, _):
- """leave lambda: update consumption analysis variable
- """
- # do not check for not used locals here
- self._to_consume.pop()
-
- def visit_generatorexp(self, node):
- """visit genexpr: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
-
- def leave_generatorexp(self, _):
- """leave genexpr: update consumption analysis variable
- """
- # do not check for not used locals here
- self._to_consume.pop()
-
- def visit_dictcomp(self, node):
- """visit dictcomp: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
-
- def leave_dictcomp(self, _):
- """leave dictcomp: update consumption analysis variable
- """
- # do not check for not used locals here
- self._to_consume.pop()
-
- def visit_setcomp(self, node):
- """visit setcomp: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
-
- def leave_setcomp(self, _):
- """leave setcomp: update consumption analysis variable
- """
- # do not check for not used locals here
- self._to_consume.pop()
-
- def visit_functiondef(self, node):
- """visit function: update consumption analysis variable and check locals
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'function'))
- if not (self.linter.is_message_enabled('redefined-outer-name') or
- self.linter.is_message_enabled('redefined-builtin')):
- return
- globs = node.root().globals
- for name, stmt in node.items():
- if utils.is_inside_except(stmt):
- continue
- if name in globs and not isinstance(stmt, astroid.Global):
- definition = globs[name][0]
- if (isinstance(definition, astroid.ImportFrom)
- and definition.modname == FUTURE):
- # It is a __future__ directive, not a symbol.
- continue
-
- line = definition.fromlineno
- dummy_rgx = self.config.dummy_variables_rgx
- if not dummy_rgx.match(name):
- self.add_message('redefined-outer-name',
- args=(name, line), node=stmt)
-
- elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(stmt):
- # do not print Redefining builtin for additional builtins
- self.add_message('redefined-builtin', args=name, node=stmt)
-
- def _is_name_ignored(self, stmt, name):
- authorized_rgx = self.config.dummy_variables_rgx
- if (isinstance(stmt, astroid.AssignName)
- and isinstance(stmt.parent, astroid.Arguments)):
- regex = self.config.ignored_argument_names
- else:
- regex = authorized_rgx
- return regex and regex.match(name)
-
- def _check_is_unused(self, name, node, stmt, global_names, nonlocal_names):
- # Ignore some special names specified by user configuration.
- if self._is_name_ignored(stmt, name):
- return
-
- # Ignore names imported by the global statement.
- # FIXME: should only ignore them if it's assigned latter
- if isinstance(stmt, astroid.Global):
- return
- if isinstance(stmt, (astroid.Import, astroid.ImportFrom)):
- # Detect imports, assigned to global statements.
- if global_names and _import_name_is_global(stmt, global_names):
- return
-
- argnames = list(itertools.chain(
- node.argnames(),
- [arg.name for arg in node.args.kwonlyargs]
- ))
- is_method = node.is_method()
- klass = node.parent.frame()
- if is_method and isinstance(klass, astroid.ClassDef):
- confidence = INFERENCE if utils.has_known_bases(klass) else INFERENCE_FAILURE
- else:
- confidence = HIGH
-
- # Care about functions with unknown argument (builtins)
- if name in argnames:
- if is_method:
- # Don't warn for the first argument of a (non static) method
- if node.type != 'staticmethod' and name == argnames[0]:
- return
- # Don't warn for argument of an overridden method
- overridden = overridden_method(klass, node.name)
- if overridden is not None and name in overridden.argnames():
- return
- if node.name in utils.PYMETHODS and node.name not in ('__init__', '__new__'):
- return
- # Don't check callback arguments
- if any(node.name.startswith(cb) or node.name.endswith(cb)
- for cb in self.config.callbacks):
- return
- # Don't check arguments of singledispatch.register function.
- if utils.is_registered_in_singledispatch_function(node):
- return
- self.add_message('unused-argument', args=name, node=stmt,
- confidence=confidence)
- else:
- if stmt.parent and isinstance(stmt.parent, astroid.Assign):
- if name in nonlocal_names:
- return
-
- if isinstance(stmt, astroid.Import):
- # Need the complete name, which we don't have in .locals.
- qname, asname = stmt.names[0]
- name = asname or qname
-
- self.add_message('unused-variable', args=name, node=stmt)
-
- def leave_functiondef(self, node):
- """leave function: check function's locals are consumed"""
- not_consumed = self._to_consume.pop()[0]
- if not (self.linter.is_message_enabled('unused-variable') or
- self.linter.is_message_enabled('unused-argument')):
- return
-
- # Don't check arguments of function which are only raising an exception.
- if utils.is_error(node):
- return
-
- # Don't check arguments of abstract methods or within an interface.
- is_method = node.is_method()
- if is_method and node.is_abstract():
- return
-
- global_names = _flattened_scope_names(node.nodes_of_class(astroid.Global))
- nonlocal_names = _flattened_scope_names(node.nodes_of_class(astroid.Nonlocal))
- for name, stmts in six.iteritems(not_consumed):
- self._check_is_unused(name, node, stmts[0], global_names, nonlocal_names)
-
- visit_asyncfunctiondef = visit_functiondef
- leave_asyncfunctiondef = leave_functiondef
-
- @utils.check_messages('global-variable-undefined', 'global-variable-not-assigned',
- 'global-statement', 'global-at-module-level',
- 'redefined-builtin')
- def visit_global(self, node):
- """check names imported exists in the global scope"""
- frame = node.frame()
- if isinstance(frame, astroid.Module):
- self.add_message('global-at-module-level', node=node)
- return
-
- module = frame.root()
- default_message = True
- for name in node.names:
- try:
- assign_nodes = module.getattr(name)
- except astroid.NotFoundError:
- # unassigned global, skip
- assign_nodes = []
-
- if not assign_nodes:
- self.add_message('global-variable-not-assigned',
- args=name, node=node)
- default_message = False
- continue
-
- for anode in assign_nodes:
- if (isinstance(anode, astroid.AssignName)
- and anode.name in module.special_attributes):
- self.add_message('redefined-builtin', args=name, node=node)
- break
- if anode.frame() is module:
- # module level assignment
- break
- else:
- # global undefined at the module scope
- self.add_message('global-variable-undefined', args=name, node=node)
- default_message = False
-
- if default_message:
- self.add_message('global-statement', node=node)
-
- def _check_late_binding_closure(self, node, assignment_node):
- def _is_direct_lambda_call():
- return (isinstance(node_scope.parent, astroid.Call)
- and node_scope.parent.func is node_scope)
-
- node_scope = node.scope()
- if not isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
- return
- if isinstance(node.parent, astroid.Arguments):
- return
-
- if isinstance(assignment_node, astroid.Comprehension):
- if assignment_node.parent.parent_of(node.scope()):
- self.add_message('cell-var-from-loop', node=node, args=node.name)
- else:
- assign_scope = assignment_node.scope()
- maybe_for = assignment_node
- while not isinstance(maybe_for, astroid.For):
- if maybe_for is assign_scope:
- break
- maybe_for = maybe_for.parent
- else:
- if (maybe_for.parent_of(node_scope)
- and not _is_direct_lambda_call()
- and not isinstance(node_scope.statement(), astroid.Return)):
- self.add_message('cell-var-from-loop', node=node, args=node.name)
-
- def _loopvar_name(self, node, name):
- # filter variables according to node's scope
- # XXX used to filter parents but don't remember why, and removing this
- # fixes a W0631 false positive reported by Paul Hachmann on 2008/12 on
- # python-projects (added to func_use_for_or_listcomp_var test)
- #astmts = [stmt for stmt in node.lookup(name)[1]
- # if hasattr(stmt, 'ass_type')] and
- # not stmt.statement().parent_of(node)]
- if not self.linter.is_message_enabled('undefined-loop-variable'):
- return
- astmts = [stmt for stmt in node.lookup(name)[1]
- if hasattr(stmt, 'ass_type')]
- # filter variables according their respective scope test is_statement
- # and parent to avoid #74747. This is not a total fix, which would
- # introduce a mechanism similar to special attribute lookup in
- # modules. Also, in order to get correct inference in this case, the
- # scope lookup rules would need to be changed to return the initial
- # assignment (which does not exist in code per se) as well as any later
- # modifications.
- if not astmts or (astmts[0].is_statement or astmts[0].parent) \
- and astmts[0].statement().parent_of(node):
- _astmts = []
- else:
- _astmts = astmts[:1]
- for i, stmt in enumerate(astmts[1:]):
- if (astmts[i].statement().parent_of(stmt)
- and not in_for_else_branch(astmts[i].statement(), stmt)):
- continue
- _astmts.append(stmt)
- astmts = _astmts
- if len(astmts) == 1:
- assign = astmts[0].assign_type()
- if (isinstance(assign, (astroid.For, astroid.Comprehension,
- astroid.GeneratorExp))
- and assign.statement() is not node.statement()):
- self.add_message('undefined-loop-variable', args=name, node=node)
-
- def _should_ignore_redefined_builtin(self, stmt):
- if not isinstance(stmt, astroid.ImportFrom):
- return False
- return stmt.modname in self.config.redefining_builtins_modules
-
- @utils.check_messages('redefine-in-handler')
- def visit_excepthandler(self, node):
- for name in utils.get_all_elements(node.name):
- clobbering, args = utils.clobber_in_except(name)
- if clobbering:
- self.add_message('redefine-in-handler', args=args, node=name)
-
- def visit_assignname(self, node):
- if isinstance(node.assign_type(), astroid.AugAssign):
- self.visit_name(node)
-
- def visit_delname(self, node):
- self.visit_name(node)
-
- @staticmethod
- def _defined_in_function_definition(node, frame):
- in_annotation_or_default = False
- if (isinstance(frame, astroid.FunctionDef) and
- node.statement() is frame):
- in_annotation_or_default = (
- (
- PY3K and (node in frame.args.annotations
- or node in frame.args.kwonlyargs_annotations
- or node is frame.args.varargannotation
- or node is frame.args.kwargannotation)
- )
- or
- frame.args.parent_of(node)
- )
- return in_annotation_or_default
-
- @staticmethod
- def _next_to_consume(node, name, to_consume):
- # mark the name as consumed if it's defined in this scope
- found_node = to_consume.get(name)
- if (found_node
- and isinstance(node.parent, astroid.Assign)
- and node.parent == found_node[0].parent):
- lhs = found_node[0].parent.targets[0]
- if lhs.name == name: # this name is defined in this very statement
- found_node = None
- return found_node
-
- @staticmethod
- def _is_variable_violation(node, name, defnode, stmt, defstmt,
- frame, defframe, base_scope_type,
- recursive_klass):
- maybee0601 = True
- annotation_return = False
- use_outer_definition = False
- if frame is not defframe:
- maybee0601 = _detect_global_scope(node, frame, defframe)
- elif defframe.parent is None:
- # we are at the module level, check the name is not
- # defined in builtins
- if name in defframe.scope_attrs or astroid.builtin_lookup(name)[1]:
- maybee0601 = False
- else:
- # we are in a local scope, check the name is not
- # defined in global or builtin scope
- # skip this lookup if name is assigned later in function scope
- forbid_lookup = isinstance(frame, astroid.FunctionDef) and _assigned_locally(node)
- if not forbid_lookup and defframe.root().lookup(name)[1]:
- maybee0601 = False
- use_outer_definition = (
- stmt == defstmt
- and not isinstance(defnode, astroid.node_classes.Comprehension)
- )
- else:
- # check if we have a nonlocal
- if name in defframe.locals:
- maybee0601 = not any(isinstance(child, astroid.Nonlocal)
- and name in child.names
- for child in defframe.get_children())
-
- if (base_scope_type == 'lambda' and
- isinstance(frame, astroid.ClassDef)
- and name in frame.locals):
-
- # This rule verifies that if the definition node of the
- # checked name is an Arguments node and if the name
- # is used a default value in the arguments defaults
- # and the actual definition of the variable label
- # is happening before the Arguments definition.
- #
- # bar = None
- # foo = lambda bar=bar: bar
- #
- # In this case, maybee0601 should be False, otherwise
- # it should be True.
- maybee0601 = not (isinstance(defnode, astroid.Arguments) and
- node in defnode.defaults and
- frame.locals[name][0].fromlineno < defstmt.fromlineno)
- elif (isinstance(defframe, astroid.ClassDef) and
- isinstance(frame, astroid.FunctionDef)):
- # Special rule for function return annotations,
- # which uses the same name as the class where
- # the function lives.
- if (PY3K and node is frame.returns and
- defframe.parent_of(frame.returns)):
- maybee0601 = annotation_return = True
-
- if (maybee0601 and defframe.name in defframe.locals and
- defframe.locals[name][0].lineno < frame.lineno):
- # Detect class assignments with the same
- # name as the class. In this case, no warning
- # should be raised.
- maybee0601 = False
- if isinstance(node.parent, astroid.Arguments):
- maybee0601 = stmt.fromlineno <= defstmt.fromlineno
- elif recursive_klass:
- maybee0601 = True
- else:
- maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
- if maybee0601 and stmt.fromlineno == defstmt.fromlineno:
- if (isinstance(defframe, astroid.FunctionDef)
- and frame is defframe
- and defframe.parent_of(node)
- and stmt is not defstmt):
- # Single statement function, with the statement on the
- # same line as the function definition
- maybee0601 = False
-
- return maybee0601, annotation_return, use_outer_definition
-
- def _ignore_class_scope(self, node, name, frame):
- # Detect if we are in a local class scope, as an assignment.
- # For example, the following is fair game.
- #
- # class A:
- # b = 1
- # c = lambda b=b: b * b
- #
- # class B:
- # tp = 1
- # def func(self, arg: tp):
- # ...
- # class C:
- # tp = 2
- # def func(self, arg=tp):
- # ...
-
- in_annotation_or_default = self._defined_in_function_definition(
- node, frame)
- if in_annotation_or_default:
- frame_locals = frame.parent.scope().locals
- else:
- frame_locals = frame.locals
- return not ((isinstance(frame, astroid.ClassDef) or
- in_annotation_or_default) and
- name in frame_locals)
-
- @utils.check_messages(*(MSGS.keys()))
- def visit_name(self, node):
- """check that a name is defined if the current scope and doesn't
- redefine a built-in
- """
- stmt = node.statement()
- if stmt.fromlineno is None:
- # name node from a astroid built from live code, skip
- assert not stmt.root().file.endswith('.py')
- return
- name = node.name
- frame = stmt.scope()
- # if the name node is used as a function default argument's value or as
- # a decorator, then start from the parent frame of the function instead
- # of the function frame - and thus open an inner class scope
- if (utils.is_func_default(node) or utils.is_func_decorator(node)
- or utils.is_ancestor_name(frame, node)):
- start_index = len(self._to_consume) - 2
- else:
- start_index = len(self._to_consume) - 1
- # iterates through parent scopes, from the inner to the outer
- base_scope_type = self._to_consume[start_index][-1]
- # pylint: disable=too-many-nested-blocks; refactoring this block is a pain.
- for i in range(start_index, -1, -1):
- to_consume, consumed, scope_type = self._to_consume[i]
- # if the current scope is a class scope but it's not the inner
- # scope, ignore it. This prevents to access this scope instead of
- # the globals one in function members when there are some common
- # names. The only exception is when the starting scope is a
- # comprehension and its direct outer scope is a class
- if scope_type == 'class' and i != start_index and not (
- base_scope_type == 'comprehension' and i == start_index-1):
- if self._ignore_class_scope(node, name, frame):
- continue
-
- # the name has already been consumed, only check it's not a loop
- # variable used outside the loop
- if name in consumed:
- defnode = utils.assign_parent(consumed[name][0])
- self._check_late_binding_closure(node, defnode)
- self._loopvar_name(node, name)
- break
- found_node = self._next_to_consume(node, name, to_consume)
- if found_node is None:
- continue
- # checks for use before assignment
- defnode = utils.assign_parent(to_consume[name][0])
- if defnode is not None:
- self._check_late_binding_closure(node, defnode)
- defstmt = defnode.statement()
- defframe = defstmt.frame()
- # The class reuses itself in the class scope.
- recursive_klass = (frame is defframe and
- defframe.parent_of(node) and
- isinstance(defframe, astroid.ClassDef) and
- node.name == defframe.name)
-
- maybee0601, annotation_return, use_outer_definition = self._is_variable_violation(
- node, name, defnode, stmt, defstmt,
- frame, defframe,
- base_scope_type, recursive_klass)
-
- if use_outer_definition:
- continue
-
- if (maybee0601
- and not utils.is_defined_before(node)
- and not astroid.are_exclusive(stmt, defstmt, ('NameError',))):
-
- # Used and defined in the same place, e.g `x += 1` and `del x`
- defined_by_stmt = (
- defstmt is stmt
- and isinstance(node, (astroid.DelName, astroid.AssignName))
- )
- if (recursive_klass
- or defined_by_stmt
- or annotation_return
- or isinstance(defstmt, astroid.Delete)):
- if not utils.node_ignores_exception(node, NameError):
- self.add_message('undefined-variable', args=name,
- node=node)
- elif base_scope_type != 'lambda':
- # E0601 may *not* occurs in lambda scope.
- self.add_message('used-before-assignment', args=name, node=node)
- elif base_scope_type == 'lambda':
- # E0601 can occur in class-level scope in lambdas, as in
- # the following example:
- # class A:
- # x = lambda attr: f + attr
- # f = 42
- if isinstance(frame, astroid.ClassDef) and name in frame.locals:
- if isinstance(node.parent, astroid.Arguments):
- if stmt.fromlineno <= defstmt.fromlineno:
- # Doing the following is fine:
- # class A:
- # x = 42
- # y = lambda attr=x: attr
- self.add_message('used-before-assignment',
- args=name, node=node)
- else:
- self.add_message('undefined-variable',
- args=name, node=node)
- elif scope_type == 'lambda':
- self.add_message('undefined-variable',
- node=node, args=name)
-
- consumed[name] = found_node
- del to_consume[name]
- # check it's not a loop variable used outside the loop
- self._loopvar_name(node, name)
- break
- else:
- # we have not found the name, if it isn't a builtin, that's an
- # undefined name !
- if not (name in astroid.Module.scope_attrs or utils.is_builtin(name)
- or name in self.config.additional_builtins):
- if not utils.node_ignores_exception(node, NameError):
- self.add_message('undefined-variable', args=name, node=node)
-
- @utils.check_messages('no-name-in-module')
- def visit_import(self, node):
- """check modules attribute accesses"""
- if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
- # No need to verify this, since ImportError is already
- # handled by the client code.
- return
-
- for name, _ in node.names:
- parts = name.split('.')
- try:
- module = next(node.infer_name_module(parts[0]))
- except astroid.ResolveError:
- continue
- self._check_module_attrs(node, module, parts[1:])
-
- @utils.check_messages('no-name-in-module')
- def visit_importfrom(self, node):
- """check modules attribute accesses"""
- if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
- # No need to verify this, since ImportError is already
- # handled by the client code.
- return
-
- name_parts = node.modname.split('.')
- try:
- module = node.do_import_module(name_parts[0])
- except astroid.AstroidBuildingException:
- return
- module = self._check_module_attrs(node, module, name_parts[1:])
- if not module:
- return
- for name, _ in node.names:
- if name == '*':
- continue
- self._check_module_attrs(node, module, name.split('.'))
-
- @utils.check_messages('unbalanced-tuple-unpacking', 'unpacking-non-sequence')
- def visit_assign(self, node):
- """Check unbalanced tuple unpacking for assignments
- and unpacking non-sequences.
- """
- if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
- return
-
- targets = node.targets[0].itered()
- try:
- infered = utils.safe_infer(node.value)
- if infered is not None:
- self._check_unpacking(infered, node, targets)
- except astroid.InferenceError:
- return
-
- def _check_unpacking(self, infered, node, targets):
- """ Check for unbalanced tuple unpacking
- and unpacking non sequences.
- """
- if utils.is_inside_abstract_class(node):
- return
- if utils.is_comprehension(node):
- return
- if infered is astroid.YES:
- return
- if (isinstance(infered.parent, astroid.Arguments) and
- isinstance(node.value, astroid.Name) and
- node.value.name == infered.parent.vararg):
- # Variable-length argument, we can't determine the length.
- return
- if isinstance(infered, (astroid.Tuple, astroid.List)):
- # attempt to check unpacking is properly balanced
- values = infered.itered()
- if len(targets) != len(values):
- # Check if we have starred nodes.
- if any(isinstance(target, astroid.Starred)
- for target in targets):
- return
- self.add_message('unbalanced-tuple-unpacking', node=node,
- args=(_get_unpacking_extra_info(node, infered),
- len(targets),
- len(values)))
- # attempt to check unpacking may be possible (ie RHS is iterable)
- else:
- if not utils.is_iterable(infered):
- self.add_message('unpacking-non-sequence', node=node,
- args=(_get_unpacking_extra_info(node, infered),))
-
-
- def _check_module_attrs(self, node, module, module_names):
- """check that module_names (list of string) are accessible through the
- given module
- if the latest access name corresponds to a module, return it
- """
- assert isinstance(module, astroid.Module), module
- while module_names:
- name = module_names.pop(0)
- if name == '__dict__':
- module = None
- break
- try:
- module = next(module.getattr(name)[0].infer())
- if module is astroid.YES:
- return None
- except astroid.NotFoundError:
- if module.name in self._ignored_modules:
- return None
- self.add_message('no-name-in-module',
- args=(name, module.name), node=node)
- return None
- except astroid.InferenceError:
- return None
- if module_names:
- # FIXME: other message if name is not the latest part of
- # module_names ?
- modname = module.name if module else '__dict__'
- self.add_message('no-name-in-module', node=node,
- args=('.'.join(module_names), modname))
- return None
- if isinstance(module, astroid.Module):
- return module
- return None
-
-
-class VariablesChecker3k(VariablesChecker):
- '''Modified variables checker for 3k'''
- # listcomp have now also their scope
-
- def visit_listcomp(self, node):
- """visit dictcomp: update consumption analysis variable
- """
- self._to_consume.append((copy.copy(node.locals), {}, 'comprehension'))
-
- def leave_listcomp(self, _):
- """leave dictcomp: update consumption analysis variable
- """
- # do not check for not used locals here
- self._to_consume.pop()
-
- def leave_functiondef(self, node):
- self._check_metaclasses(node)
- super(VariablesChecker3k, self).leave_functiondef(node)
-
- def leave_module(self, node):
- self._check_metaclasses(node)
- super(VariablesChecker3k, self).leave_module(node)
-
- def _check_metaclasses(self, node):
- """ Update consumption analysis for metaclasses. """
- consumed = [] # [(scope_locals, consumed_key)]
-
- for child_node in node.get_children():
- if isinstance(child_node, astroid.ClassDef):
- consumed.extend(self._check_classdef_metaclasses(child_node, node))
-
- # Pop the consumed items, in order to avoid having
- # unused-import and unused-variable false positives
- for scope_locals, name in consumed:
- scope_locals.pop(name, None)
-
- def _check_classdef_metaclasses(self, klass, parent_node):
- if not klass._metaclass:
- # Skip if this class doesn't use explicitly a metaclass, but inherits it from ancestors
- return []
-
- consumed = [] # [(scope_locals, consumed_key)]
- metaclass = klass.metaclass()
-
- name = None
- if isinstance(klass._metaclass, astroid.Name):
- name = klass._metaclass.name
- elif metaclass:
- name = metaclass.root().name
-
- found = None
- if name:
- # check enclosing scopes starting from most local
- for scope_locals, _, _ in self._to_consume[::-1]:
- found = scope_locals.get(name)
- if found:
- consumed.append((scope_locals, name))
- break
-
- if found is None and not metaclass:
- name = None
- if isinstance(klass._metaclass, astroid.Name):
- name = klass._metaclass.name
- elif isinstance(klass._metaclass, astroid.Attribute):
- name = klass._metaclass.as_string()
-
- if name is not None:
- if not (name in astroid.Module.scope_attrs or
- utils.is_builtin(name) or
- name in self.config.additional_builtins or
- name in parent_node.locals):
- self.add_message('undefined-variable',
- node=klass,
- args=(name,))
-
- return consumed
-
-
-if sys.version_info >= (3, 0):
- VariablesChecker = VariablesChecker3k
-
-
-def register(linter):
- """required method to auto register this checker"""
- linter.register_checker(VariablesChecker(linter))
diff --git a/pymode/libs/pylint/config.py b/pymode/libs/pylint/config.py
deleted file mode 100644
index 6922949f..00000000
--- a/pymode/libs/pylint/config.py
+++ /dev/null
@@ -1,831 +0,0 @@
-# Copyright (c) 2006-2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2015 Aru Sahni
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""utilities for Pylint configuration :
-
-* pylintrc
-* pylint.d (PYLINTHOME)
-"""
-from __future__ import print_function
-
-# TODO(cpopa): this module contains the logic for the
-# configuration parser and for the command line parser,
-# but it's really coupled to optparse's internals.
-# The code was copied almost verbatim from logilab.common,
-# in order to not depend on it anymore and it will definitely
-# need a cleanup. It could be completely reengineered as well.
-
-import contextlib
-import collections
-import copy
-import io
-import optparse
-import os
-import pickle
-import re
-import sys
-import time
-
-import configparser
-from six.moves import range
-
-from pylint import utils
-
-
-USER_HOME = os.path.expanduser('~')
-if 'PYLINTHOME' in os.environ:
- PYLINT_HOME = os.environ['PYLINTHOME']
- if USER_HOME == '~':
- USER_HOME = os.path.dirname(PYLINT_HOME)
-elif USER_HOME == '~':
- PYLINT_HOME = ".pylint.d"
-else:
- PYLINT_HOME = os.path.join(USER_HOME, '.pylint.d')
-
-
-def _get_pdata_path(base_name, recurs):
- base_name = base_name.replace(os.sep, '_')
- return os.path.join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
-
-
-def load_results(base):
- data_file = _get_pdata_path(base, 1)
- try:
- with open(data_file, _PICK_LOAD) as stream:
- return pickle.load(stream)
- except Exception: # pylint: disable=broad-except
- return {}
-
-if sys.version_info < (3, 0):
- _PICK_DUMP, _PICK_LOAD = 'w', 'r'
-else:
- _PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
-
-def save_results(results, base):
- if not os.path.exists(PYLINT_HOME):
- try:
- os.mkdir(PYLINT_HOME)
- except OSError:
- print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
- data_file = _get_pdata_path(base, 1)
- try:
- with open(data_file, _PICK_DUMP) as stream:
- pickle.dump(results, stream)
- except (IOError, OSError) as ex:
- print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
-
-
-def find_pylintrc():
- """search the pylint rc file and return its path if it find it, else None
- """
- # is there a pylint rc file in the current directory ?
- if os.path.exists('pylintrc'):
- return os.path.abspath('pylintrc')
- if os.path.exists('.pylintrc'):
- return os.path.abspath('.pylintrc')
- if os.path.isfile('__init__.py'):
- curdir = os.path.abspath(os.getcwd())
- while os.path.isfile(os.path.join(curdir, '__init__.py')):
- curdir = os.path.abspath(os.path.join(curdir, '..'))
- if os.path.isfile(os.path.join(curdir, 'pylintrc')):
- return os.path.join(curdir, 'pylintrc')
- if os.path.isfile(os.path.join(curdir, '.pylintrc')):
- return os.path.join(curdir, '.pylintrc')
- if 'PYLINTRC' in os.environ and os.path.exists(os.environ['PYLINTRC']):
- pylintrc = os.environ['PYLINTRC']
- else:
- user_home = os.path.expanduser('~')
- if user_home == '~' or user_home == '/root':
- pylintrc = ".pylintrc"
- else:
- pylintrc = os.path.join(user_home, '.pylintrc')
- if not os.path.isfile(pylintrc):
- pylintrc = os.path.join(user_home, '.config', 'pylintrc')
- if not os.path.isfile(pylintrc):
- if os.path.isfile('/etc/pylintrc'):
- pylintrc = '/etc/pylintrc'
- else:
- pylintrc = None
- return pylintrc
-
-PYLINTRC = find_pylintrc()
-
-ENV_HELP = '''
-The following environment variables are used:
- * PYLINTHOME
- Path to the directory where the persistent for the run will be stored. If
-not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
-directory).
- * PYLINTRC
- Path to the configuration file. See the documentation for the method used
-to search for configuration file.
-''' % globals()
-
-
-class UnsupportedAction(Exception):
- """raised by set_option when it doesn't know what to do for an action"""
-
-
-def _multiple_choice_validator(choices, name, value):
- values = utils._check_csv(value)
- for csv_value in values:
- if csv_value not in choices:
- msg = "option %s: invalid value: %r, should be in %s"
- raise optparse.OptionValueError(msg % (name, csv_value, choices))
- return values
-
-
-def _choice_validator(choices, name, value):
- if value not in choices:
- msg = "option %s: invalid value: %r, should be in %s"
- raise optparse.OptionValueError(msg % (name, value, choices))
- return value
-
-# pylint: disable=unused-argument
-def _csv_validator(_, name, value):
- return utils._check_csv(value)
-
-
-# pylint: disable=unused-argument
-def _regexp_validator(_, name, value):
- if hasattr(value, 'pattern'):
- return value
- return re.compile(value)
-
-# pylint: disable=unused-argument
-def _regexp_csv_validator(_, name, value):
- return [_regexp_validator(_, name, val) for val in _csv_validator(_, name, value)]
-
-def _yn_validator(opt, _, value):
- if isinstance(value, int):
- return bool(value)
- if value in ('y', 'yes'):
- return True
- if value in ('n', 'no'):
- return False
- msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)"
- raise optparse.OptionValueError(msg % (opt, value))
-
-
-def _non_empty_string_validator(opt, _, value):
- if not value:
- msg = "indent string can't be empty."
- raise optparse.OptionValueError(msg)
- return utils._unquote(value)
-
-
-VALIDATORS = {
- 'string': utils._unquote,
- 'int': int,
- 'regexp': re.compile,
- 'regexp_csv': _regexp_csv_validator,
- 'csv': _csv_validator,
- 'yn': _yn_validator,
- 'choice': lambda opt, name, value: _choice_validator(opt['choices'], name, value),
- 'multiple_choice': lambda opt, name, value: _multiple_choice_validator(opt['choices'],
- name, value),
- 'non_empty_string': _non_empty_string_validator,
-}
-
-def _call_validator(opttype, optdict, option, value):
- if opttype not in VALIDATORS:
- raise Exception('Unsupported type "%s"' % opttype)
- try:
- return VALIDATORS[opttype](optdict, option, value)
- except TypeError:
- try:
- return VALIDATORS[opttype](value)
- except Exception:
- raise optparse.OptionValueError('%s value (%r) should be of type %s' %
- (option, value, opttype))
-
-
-def _validate(value, optdict, name=''):
- """return a validated value for an option according to its type
-
- optional argument name is only used for error message formatting
- """
- try:
- _type = optdict['type']
- except KeyError:
- # FIXME
- return value
- return _call_validator(_type, optdict, name, value)
-
-
-def _level_options(group, outputlevel):
- return [option for option in group.option_list
- if (getattr(option, 'level', 0) or 0) <= outputlevel
- and option.help is not optparse.SUPPRESS_HELP]
-
-
-def _expand_default(self, option):
- """Patch OptionParser.expand_default with custom behaviour
-
- This will handle defaults to avoid overriding values in the
- configuration file.
- """
- if self.parser is None or not self.default_tag:
- return option.help
- optname = option._long_opts[0][2:]
- try:
- provider = self.parser.options_manager._all_options[optname]
- except KeyError:
- value = None
- else:
- optdict = provider.get_option_def(optname)
- optname = provider.option_attrname(optname, optdict)
- value = getattr(provider.config, optname, optdict)
- value = utils._format_option_value(optdict, value)
- if value is optparse.NO_DEFAULT or not value:
- value = self.NO_DEFAULT_VALUE
- return option.help.replace(self.default_tag, str(value))
-
-
-@contextlib.contextmanager
-def _patch_optparse():
- orig_default = optparse.HelpFormatter
- try:
- optparse.HelpFormatter.expand_default = _expand_default
- yield
- finally:
- optparse.HelpFormatter.expand_default = orig_default
-
-
-def _multiple_choices_validating_option(opt, name, value):
- return _multiple_choice_validator(opt.choices, name, value)
-
-
-class Option(optparse.Option):
- TYPES = optparse.Option.TYPES + ('regexp', 'regexp_csv', 'csv', 'yn',
- 'multiple_choice',
- 'non_empty_string')
- ATTRS = optparse.Option.ATTRS + ['hide', 'level']
- TYPE_CHECKER = copy.copy(optparse.Option.TYPE_CHECKER)
- TYPE_CHECKER['regexp'] = _regexp_validator
- TYPE_CHECKER['regexp_csv'] = _regexp_csv_validator
- TYPE_CHECKER['csv'] = _csv_validator
- TYPE_CHECKER['yn'] = _yn_validator
- TYPE_CHECKER['multiple_choice'] = _multiple_choices_validating_option
- TYPE_CHECKER['non_empty_string'] = _non_empty_string_validator
-
- def __init__(self, *opts, **attrs):
- optparse.Option.__init__(self, *opts, **attrs)
- if hasattr(self, "hide") and self.hide:
- self.help = optparse.SUPPRESS_HELP
-
- def _check_choice(self):
- if self.type in ("choice", "multiple_choice"):
- if self.choices is None:
- raise optparse.OptionError(
- "must supply a list of choices for type 'choice'", self)
- elif not isinstance(self.choices, (tuple, list)):
- raise optparse.OptionError(
- "choices must be a list of strings ('%s' supplied)"
- % str(type(self.choices)).split("'")[1], self)
- elif self.choices is not None:
- raise optparse.OptionError(
- "must not supply choices for type %r" % self.type, self)
- optparse.Option.CHECK_METHODS[2] = _check_choice
-
- def process(self, opt, value, values, parser):
- # First, convert the value(s) to the right type. Howl if any
- # value(s) are bogus.
- value = self.convert_value(opt, value)
- if self.type == 'named':
- existent = getattr(values, self.dest)
- if existent:
- existent.update(value)
- value = existent
- # And then take whatever action is expected of us.
- # This is a separate method to make life easier for
- # subclasses to add new actions.
- return self.take_action(
- self.action, self.dest, opt, value, values, parser)
-
-
-class OptionParser(optparse.OptionParser):
-
- def __init__(self, option_class=Option, *args, **kwargs):
- optparse.OptionParser.__init__(self, option_class=Option, *args, **kwargs)
-
- def format_option_help(self, formatter=None):
- if formatter is None:
- formatter = self.formatter
- outputlevel = getattr(formatter, 'output_level', 0)
- formatter.store_option_strings(self)
- result = []
- result.append(formatter.format_heading("Options"))
- formatter.indent()
- if self.option_list:
- result.append(optparse.OptionContainer.format_option_help(self, formatter))
- result.append("\n")
- for group in self.option_groups:
- if group.level <= outputlevel and (
- group.description or _level_options(group, outputlevel)):
- result.append(group.format_help(formatter))
- result.append("\n")
- formatter.dedent()
- # Drop the last "\n", or the header if no options or option groups:
- return "".join(result[:-1])
-
- def _match_long_opt(self, opt):
- """Disable abbreviations."""
- if opt not in self._long_opt:
- raise optparse.BadOptionError(opt)
- return opt
-
-
-# pylint: disable=abstract-method; by design?
-class _ManHelpFormatter(optparse.HelpFormatter):
-
- def __init__(self, indent_increment=0, max_help_position=24,
- width=79, short_first=0):
- optparse.HelpFormatter.__init__(
- self, indent_increment, max_help_position, width, short_first)
-
- def format_heading(self, heading):
- return '.SH %s\n' % heading.upper()
-
- def format_description(self, description):
- return description
-
- def format_option(self, option):
- try:
- optstring = option.option_strings
- except AttributeError:
- optstring = self.format_option_strings(option)
- if option.help:
- help_text = self.expand_default(option)
- help = ' '.join([l.strip() for l in help_text.splitlines()])
- else:
- help = ''
- return '''.IP "%s"
-%s
-''' % (optstring, help)
-
- def format_head(self, optparser, pkginfo, section=1):
- long_desc = ""
- try:
- pgm = optparser._get_prog_name()
- except AttributeError:
- # py >= 2.4.X (dunno which X exactly, at least 2)
- pgm = optparser.get_prog_name()
- short_desc = self.format_short_description(pgm, pkginfo.description)
- if hasattr(pkginfo, "long_desc"):
- long_desc = self.format_long_description(pgm, pkginfo.long_desc)
- return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section),
- short_desc, self.format_synopsis(pgm),
- long_desc)
-
- @staticmethod
- def format_title(pgm, section):
- date = '-'.join(str(num) for num in time.localtime()[:3])
- return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)
-
- @staticmethod
- def format_short_description(pgm, short_desc):
- return '''.SH NAME
-.B %s
-\\- %s
-''' % (pgm, short_desc.strip())
-
- @staticmethod
- def format_synopsis(pgm):
- return '''.SH SYNOPSIS
-.B %s
-[
-.I OPTIONS
-] [
-.I
-]
-''' % pgm
-
- @staticmethod
- def format_long_description(pgm, long_desc):
- long_desc = '\n'.join(line.lstrip()
- for line in long_desc.splitlines())
- long_desc = long_desc.replace('\n.\n', '\n\n')
- if long_desc.lower().startswith(pgm):
- long_desc = long_desc[len(pgm):]
- return '''.SH DESCRIPTION
-.B %s
-%s
-''' % (pgm, long_desc.strip())
-
- @staticmethod
- def format_tail(pkginfo):
- tail = '''.SH SEE ALSO
-/usr/share/doc/pythonX.Y-%s/
-
-.SH BUGS
-Please report bugs on the project\'s mailing list:
-%s
-
-.SH AUTHOR
-%s <%s>
-''' % (getattr(pkginfo, 'debian_name', pkginfo.modname),
- pkginfo.mailinglist, pkginfo.author, pkginfo.author_email)
-
- if hasattr(pkginfo, "copyright"):
- tail += '''
-.SH COPYRIGHT
-%s
-''' % pkginfo.copyright
-
- return tail
-
-
-class OptionsManagerMixIn(object):
- """Handle configuration from both a configuration file and command line options"""
-
- def __init__(self, usage, config_file=None, version=None, quiet=0):
- self.config_file = config_file
- self.reset_parsers(usage, version=version)
- # list of registered options providers
- self.options_providers = []
- # dictionary associating option name to checker
- self._all_options = collections.OrderedDict()
- self._short_options = {}
- self._nocallback_options = {}
- self._mygroups = {}
- # verbosity
- self.quiet = quiet
- self._maxlevel = 0
-
- def reset_parsers(self, usage='', version=None):
- # configuration file parser
- self.cfgfile_parser = configparser.ConfigParser(inline_comment_prefixes=('#', ';'))
- # command line parser
- self.cmdline_parser = OptionParser(usage=usage, version=version)
- self.cmdline_parser.options_manager = self
- self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
-
- def register_options_provider(self, provider, own_group=True):
- """register an options provider"""
- assert provider.priority <= 0, "provider's priority can't be >= 0"
- for i in range(len(self.options_providers)):
- if provider.priority > self.options_providers[i].priority:
- self.options_providers.insert(i, provider)
- break
- else:
- self.options_providers.append(provider)
- non_group_spec_options = [option for option in provider.options
- if 'group' not in option[1]]
- groups = getattr(provider, 'option_groups', ())
- if own_group and non_group_spec_options:
- self.add_option_group(provider.name.upper(), provider.__doc__,
- non_group_spec_options, provider)
- else:
- for opt, optdict in non_group_spec_options:
- self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
- for gname, gdoc in groups:
- gname = gname.upper()
- goptions = [option for option in provider.options
- if option[1].get('group', '').upper() == gname]
- self.add_option_group(gname, gdoc, goptions, provider)
-
- def add_option_group(self, group_name, _, options, provider):
- # add option group to the command line parser
- if group_name in self._mygroups:
- group = self._mygroups[group_name]
- else:
- group = optparse.OptionGroup(self.cmdline_parser,
- title=group_name.capitalize())
- self.cmdline_parser.add_option_group(group)
- group.level = provider.level
- self._mygroups[group_name] = group
- # add section to the config file
- if group_name != "DEFAULT" and \
- group_name not in self.cfgfile_parser._sections:
- self.cfgfile_parser.add_section(group_name)
- # add provider's specific options
- for opt, optdict in options:
- self.add_optik_option(provider, group, opt, optdict)
-
- def add_optik_option(self, provider, optikcontainer, opt, optdict):
- args, optdict = self.optik_option(provider, opt, optdict)
- option = optikcontainer.add_option(*args, **optdict)
- self._all_options[opt] = provider
- self._maxlevel = max(self._maxlevel, option.level or 0)
-
- def optik_option(self, provider, opt, optdict):
- """get our personal option definition and return a suitable form for
- use with optik/optparse
- """
- optdict = copy.copy(optdict)
- if 'action' in optdict:
- self._nocallback_options[provider] = opt
- else:
- optdict['action'] = 'callback'
- optdict['callback'] = self.cb_set_provider_option
- # default is handled here and *must not* be given to optik if you
- # want the whole machinery to work
- if 'default' in optdict:
- if ('help' in optdict
- and optdict.get('default') is not None
- and optdict['action'] not in ('store_true', 'store_false')):
- optdict['help'] += ' [current: %default]'
- del optdict['default']
- args = ['--' + str(opt)]
- if 'short' in optdict:
- self._short_options[optdict['short']] = opt
- args.append('-' + optdict['short'])
- del optdict['short']
- # cleanup option definition dict before giving it to optik
- for key in list(optdict.keys()):
- if key not in self._optik_option_attrs:
- optdict.pop(key)
- return args, optdict
-
- def cb_set_provider_option(self, option, opt, value, parser):
- """optik callback for option setting"""
- if opt.startswith('--'):
- # remove -- on long option
- opt = opt[2:]
- else:
- # short option, get its long equivalent
- opt = self._short_options[opt[1:]]
- # trick since we can't set action='store_true' on options
- if value is None:
- value = 1
- self.global_set_option(opt, value)
-
- def global_set_option(self, opt, value):
- """set option on the correct option provider"""
- self._all_options[opt].set_option(opt, value)
-
- def generate_config(self, stream=None, skipsections=(), encoding=None):
- """write a configuration file according to the current configuration
- into the given stream or stdout
- """
- options_by_section = {}
- sections = []
- for provider in self.options_providers:
- for section, options in provider.options_by_section():
- if section is None:
- section = provider.name
- if section in skipsections:
- continue
- options = [(n, d, v) for (n, d, v) in options
- if d.get('type') is not None
- and not d.get('deprecated')]
- if not options:
- continue
- if section not in sections:
- sections.append(section)
- alloptions = options_by_section.setdefault(section, [])
- alloptions += options
- stream = stream or sys.stdout
- encoding = utils._get_encoding(encoding, stream)
- printed = False
- for section in sections:
- if printed:
- print('\n', file=stream)
- utils.format_section(stream, section.upper(),
- sorted(options_by_section[section]),
- encoding)
- printed = True
-
- def generate_manpage(self, pkginfo, section=1, stream=None):
- with _patch_optparse():
- _generate_manpage(self.cmdline_parser, pkginfo,
- section, stream=stream or sys.stdout,
- level=self._maxlevel)
-
- def load_provider_defaults(self):
- """initialize configuration using default values"""
- for provider in self.options_providers:
- provider.load_defaults()
-
- def read_config_file(self, config_file=None):
- """read the configuration file but do not load it (i.e. dispatching
- values to each options provider)
- """
- helplevel = 1
- while helplevel <= self._maxlevel:
- opt = '-'.join(['long'] * helplevel) + '-help'
- if opt in self._all_options:
- break # already processed
- # pylint: disable=unused-argument
- def helpfunc(option, opt, val, p, level=helplevel):
- print(self.help(level))
- sys.exit(0)
- helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
- optdict = {'action': 'callback', 'callback': helpfunc,
- 'help': helpmsg}
- provider = self.options_providers[0]
- self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
- provider.options += ((opt, optdict),)
- helplevel += 1
- if config_file is None:
- config_file = self.config_file
- if config_file is not None:
- config_file = os.path.expanduser(config_file)
- if config_file and os.path.exists(config_file):
- parser = self.cfgfile_parser
-
- # Use this encoding in order to strip the BOM marker, if any.
- with io.open(config_file, 'r', encoding='utf_8_sig') as fp:
- # pylint: disable=deprecated-method
- parser.readfp(fp)
-
- # normalize sections'title
- for sect, values in list(parser._sections.items()):
- if not sect.isupper() and values:
- parser._sections[sect.upper()] = values
- elif not self.quiet:
- msg = 'No config file found, using default configuration'
- print(msg, file=sys.stderr)
- return
-
- def load_config_file(self):
- """dispatch values previously read from a configuration file to each
- options provider)
- """
- parser = self.cfgfile_parser
- for section in parser.sections():
- for option, value in parser.items(section):
- try:
- self.global_set_option(option, value)
- except (KeyError, optparse.OptionError):
- # TODO handle here undeclared options appearing in the config file
- continue
-
- def load_configuration(self, **kwargs):
- """override configuration according to given parameters"""
- return self.load_configuration_from_config(kwargs)
-
- def load_configuration_from_config(self, config):
- for opt, opt_value in config.items():
- opt = opt.replace('_', '-')
- provider = self._all_options[opt]
- provider.set_option(opt, opt_value)
-
- def load_command_line_configuration(self, args=None):
- """Override configuration according to command line parameters
-
- return additional arguments
- """
- with _patch_optparse():
- if args is None:
- args = sys.argv[1:]
- else:
- args = list(args)
- (options, args) = self.cmdline_parser.parse_args(args=args)
- for provider in self._nocallback_options:
- config = provider.config
- for attr in config.__dict__.keys():
- value = getattr(options, attr, None)
- if value is None:
- continue
- setattr(config, attr, value)
- return args
-
- def add_help_section(self, title, description, level=0):
- """add a dummy option section for help purpose """
- group = optparse.OptionGroup(self.cmdline_parser,
- title=title.capitalize(),
- description=description)
- group.level = level
- self._maxlevel = max(self._maxlevel, level)
- self.cmdline_parser.add_option_group(group)
-
- def help(self, level=0):
- """return the usage string for available options """
- self.cmdline_parser.formatter.output_level = level
- with _patch_optparse():
- return self.cmdline_parser.format_help()
-
-
-class OptionsProviderMixIn(object):
- """Mixin to provide options to an OptionsManager"""
-
- # those attributes should be overridden
- priority = -1
- name = 'default'
- options = ()
- level = 0
-
- def __init__(self):
- self.config = optparse.Values()
- self.load_defaults()
-
- def load_defaults(self):
- """initialize the provider using default values"""
- for opt, optdict in self.options:
- action = optdict.get('action')
- if action != 'callback':
- # callback action have no default
- if optdict is None:
- optdict = self.get_option_def(opt)
- default = optdict.get('default')
- self.set_option(opt, default, action, optdict)
-
- def option_attrname(self, opt, optdict=None):
- """get the config attribute corresponding to opt"""
- if optdict is None:
- optdict = self.get_option_def(opt)
- return optdict.get('dest', opt.replace('-', '_'))
-
- def option_value(self, opt):
- """get the current value for the given option"""
- return getattr(self.config, self.option_attrname(opt), None)
-
- def set_option(self, optname, value, action=None, optdict=None):
- """method called to set an option (registered in the options list)"""
- if optdict is None:
- optdict = self.get_option_def(optname)
- if value is not None:
- value = _validate(value, optdict, optname)
- if action is None:
- action = optdict.get('action', 'store')
- if action == 'store':
- setattr(self.config, self.option_attrname(optname, optdict), value)
- elif action in ('store_true', 'count'):
- setattr(self.config, self.option_attrname(optname, optdict), 0)
- elif action == 'store_false':
- setattr(self.config, self.option_attrname(optname, optdict), 1)
- elif action == 'append':
- optname = self.option_attrname(optname, optdict)
- _list = getattr(self.config, optname, None)
- if _list is None:
- if isinstance(value, (list, tuple)):
- _list = value
- elif value is not None:
- _list = []
- _list.append(value)
- setattr(self.config, optname, _list)
- elif isinstance(_list, tuple):
- setattr(self.config, optname, _list + (value,))
- else:
- _list.append(value)
- elif action == 'callback':
- optdict['callback'](None, optname, value, None)
- else:
- raise UnsupportedAction(action)
-
- def get_option_def(self, opt):
- """return the dictionary defining an option given its name"""
- assert self.options
- for option in self.options:
- if option[0] == opt:
- return option[1]
- raise optparse.OptionError('no such option %s in section %r'
- % (opt, self.name), opt)
-
- def options_by_section(self):
- """return an iterator on options grouped by section
-
- (section, [list of (optname, optdict, optvalue)])
- """
- sections = {}
- for optname, optdict in self.options:
- sections.setdefault(optdict.get('group'), []).append(
- (optname, optdict, self.option_value(optname)))
- if None in sections:
- yield None, sections.pop(None)
- for section, options in sorted(sections.items()):
- yield section.upper(), options
-
- def options_and_values(self, options=None):
- if options is None:
- options = self.options
- for optname, optdict in options:
- yield (optname, optdict, self.option_value(optname))
-
-
-class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
- """basic mixin for simple configurations which don't need the
- manager / providers model
- """
- def __init__(self, *args, **kwargs):
- if not args:
- kwargs.setdefault('usage', '')
- kwargs.setdefault('quiet', 1)
- OptionsManagerMixIn.__init__(self, *args, **kwargs)
- OptionsProviderMixIn.__init__(self)
- if not getattr(self, 'option_groups', None):
- self.option_groups = []
- for _, optdict in self.options:
- try:
- gdef = (optdict['group'].upper(), '')
- except KeyError:
- continue
- if gdef not in self.option_groups:
- self.option_groups.append(gdef)
- self.register_options_provider(self, own_group=False)
-
-
-def _generate_manpage(optparser, pkginfo, section=1,
- stream=sys.stdout, level=0):
- formatter = _ManHelpFormatter()
- formatter.output_level = level
- formatter.parser = optparser
- print(formatter.format_head(optparser, pkginfo, section), file=stream)
- print(optparser.format_option_help(formatter), file=stream)
- print(formatter.format_tail(pkginfo), file=stream)
diff --git a/pymode/libs/pylint/epylint.py b/pymode/libs/pylint/epylint.py
deleted file mode 100644
index 0b714fb4..00000000
--- a/pymode/libs/pylint/epylint.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# -*- coding: utf-8;
-# mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4
-# -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4
-
-# Copyright (c) 2008-2014 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2014 Manuel Vázquez Acosta
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Emacs and Flymake compatible Pylint.
-
-This script is for integration with emacs and is compatible with flymake mode.
-
-epylint walks out of python packages before invoking pylint. This avoids
-reporting import errors that occur when a module within a package uses the
-absolute import path to get another module within this package.
-
-For example:
- - Suppose a package is structured as
-
- a/__init__.py
- a/b/x.py
- a/c/y.py
-
- - Then if y.py imports x as "from a.b import x" the following produces pylint
- errors
-
- cd a/c; pylint y.py
-
- - The following obviously doesn't
-
- pylint a/c/y.py
-
- - As this script will be invoked by emacs within the directory of the file
- we are checking we need to go out of it to avoid these false positives.
-
-
-You may also use py_run to run pylint with desired options and get back (or not)
-its output.
-"""
-from __future__ import print_function
-
-import os
-import os.path as osp
-import sys
-import shlex
-from subprocess import Popen, PIPE
-
-import six
-
-
-def _get_env():
- '''Extracts the environment PYTHONPATH and appends the current sys.path to
- those.'''
- env = dict(os.environ)
- env['PYTHONPATH'] = os.pathsep.join(sys.path)
- return env
-
-def lint(filename, options=None):
- """Pylint the given file.
-
- When run from emacs we will be in the directory of a file, and passed its
- filename. If this file is part of a package and is trying to import other
- modules from within its own package or another package rooted in a directory
- below it, pylint will classify it as a failed import.
-
- To get around this, we traverse down the directory tree to find the root of
- the package this module is in. We then invoke pylint from this directory.
-
- Finally, we must correct the filenames in the output generated by pylint so
- Emacs doesn't become confused (it will expect just the original filename,
- while pylint may extend it with extra directories if we've traversed down
- the tree)
- """
- # traverse downwards until we are out of a python package
- full_path = osp.abspath(filename)
- parent_path = osp.dirname(full_path)
- child_path = osp.basename(full_path)
-
- while parent_path != "/" and osp.exists(osp.join(parent_path, '__init__.py')):
- child_path = osp.join(osp.basename(parent_path), child_path)
- parent_path = osp.dirname(parent_path)
-
- # Start pylint
- # Ensure we use the python and pylint associated with the running epylint
- run_cmd = "import sys; from pylint.lint import Run; Run(sys.argv[1:])"
- options = options or ['--disable=C,R,I']
- cmd = [sys.executable, "-c", run_cmd] + [
- '--msg-template', '{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}',
- '-r', 'n', child_path] + options
- process = Popen(cmd, stdout=PIPE, cwd=parent_path, env=_get_env(),
- universal_newlines=True)
-
- for line in process.stdout:
- # remove pylintrc warning
- if line.startswith("No config file found"):
- continue
-
- # modify the file name thats output to reverse the path traversal we made
- parts = line.split(":")
- if parts and parts[0] == child_path:
- line = ":".join([filename] + parts[1:])
- print(line, end=' ')
-
- process.wait()
- return process.returncode
-
-
-def py_run(command_options='', return_std=False, stdout=None, stderr=None):
- """Run pylint from python
-
- ``command_options`` is a string containing ``pylint`` command line options;
- ``return_std`` (boolean) indicates return of created standard output
- and error (see below);
- ``stdout`` and ``stderr`` are 'file-like' objects in which standard output
- could be written.
-
- Calling agent is responsible for stdout/err management (creation, close).
- Default standard output and error are those from sys,
- or standalone ones (``subprocess.PIPE``) are used
- if they are not set and ``return_std``.
-
- If ``return_std`` is set to ``True``, this function returns a 2-uple
- containing standard output and error related to created process,
- as follows: ``(stdout, stderr)``.
-
- A trivial usage could be as follows:
- >>> py_run( '--version')
- No config file found, using default configuration
- pylint 0.18.1,
- ...
-
- To silently run Pylint on a module, and get its standard output and error:
- >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
- """
- # Create command line to call pylint
- epylint_part = [sys.executable, "-c", "from pylint import epylint;epylint.Run()"]
- options = shlex.split(command_options)
- cli = epylint_part + options
-
- # Providing standard output and/or error if not set
- if stdout is None:
- if return_std:
- stdout = PIPE
- else:
- stdout = sys.stdout
- if stderr is None:
- if return_std:
- stderr = PIPE
- else:
- stderr = sys.stderr
- # Call pylint in a subprocess
- process = Popen(cli, shell=False, stdout=stdout, stderr=stderr,
- env=_get_env(), universal_newlines=True)
- proc_stdout, proc_stderr = process.communicate()
- # Return standard output and error
- if return_std:
- return six.moves.StringIO(proc_stdout), six.moves.StringIO(proc_stderr)
-
-
-def Run():
- if len(sys.argv) == 1:
- print("Usage: %s [options]" % sys.argv[0])
- sys.exit(1)
- elif not osp.exists(sys.argv[1]):
- print("%s does not exist" % sys.argv[1])
- sys.exit(1)
- else:
- sys.exit(lint(sys.argv[1], sys.argv[2:]))
-
-
-if __name__ == '__main__':
- Run()
diff --git a/pymode/libs/pylint/exceptions.py b/pymode/libs/pylint/exceptions.py
deleted file mode 100644
index 429379d9..00000000
--- a/pymode/libs/pylint/exceptions.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2016 Glenn Matthews
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Exception classes raised by various operations within pylint."""
-
-
-class InvalidMessageError(Exception):
- """raised when a message creation, registration or addition is rejected"""
-
-class UnknownMessageError(Exception):
- """raised when a unregistered message id is encountered"""
-
-class EmptyReportError(Exception):
- """raised when a report is empty and so should not be displayed"""
diff --git a/pymode/libs/pylint/extensions/_check_docs_utils.py b/pymode/libs/pylint/extensions/_check_docs_utils.py
deleted file mode 100644
index 920e02f3..00000000
--- a/pymode/libs/pylint/extensions/_check_docs_utils.py
+++ /dev/null
@@ -1,580 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Ashley Whetter
-# Copyright (c) 2016 Moisés López
-# Copyright (c) 2016 Glenn Matthews
-# Copyright (c) 2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Utility methods for docstring checking."""
-
-from __future__ import absolute_import, print_function
-
-import re
-
-import astroid
-
-from pylint.checkers.utils import (
- inherit_from_std_ex,
- node_ignores_exception,
- safe_infer,
-)
-
-
-def space_indentation(s):
- """The number of leading spaces in a string
-
- :param str s: input string
-
- :rtype: int
- :return: number of leading spaces
- """
- return len(s) - len(s.lstrip(' '))
-
-
-def returns_something(return_node):
- """Check if a return node returns a value other than None.
-
- :param return_node: The return node to check.
- :type return_node: astroid.Return
-
- :rtype: bool
- :return: True if the return node returns a value other than None,
- False otherwise.
- """
- returns = return_node.value
-
- if returns is None:
- return False
-
- return not (isinstance(returns, astroid.Const) and returns.value is None)
-
-
-def possible_exc_types(node):
- """
- Gets all of the possible raised exception types for the given raise node.
-
- .. note::
-
- Caught exception types are ignored.
-
-
- :param node: The raise node to find exception types for.
- :type node: astroid.node_classes.NodeNG
-
- :returns: A list of exception types possibly raised by :param:`node`.
- :rtype: list(str)
- """
- excs = []
- if isinstance(node.exc, astroid.Name):
- inferred = safe_infer(node.exc)
- if inferred:
- excs = [inferred.name]
- elif (isinstance(node.exc, astroid.Call) and
- isinstance(node.exc.func, astroid.Name)):
- target = safe_infer(node.exc.func)
- if isinstance(target, astroid.ClassDef):
- excs = [target.name]
- elif isinstance(target, astroid.FunctionDef):
- for ret in target.nodes_of_class(astroid.Return):
- if ret.frame() != target:
- # return from inner function - ignore it
- continue
-
- val = safe_infer(ret.value)
- if (val and isinstance(val, (astroid.Instance, astroid.ClassDef))
- and inherit_from_std_ex(val)):
- excs.append(val.name)
- elif node.exc is None:
- handler = node.parent
- while handler and not isinstance(handler, astroid.ExceptHandler):
- handler = handler.parent
-
- if handler and handler.type:
- inferred_excs = astroid.unpack_infer(handler.type)
- excs = (exc.name for exc in inferred_excs
- if exc is not astroid.Uninferable)
-
-
- try:
- return set(exc for exc in excs if not node_ignores_exception(node, exc))
- except astroid.InferenceError:
- return ()
-
-
-def docstringify(docstring):
- for docstring_type in [SphinxDocstring, EpytextDocstring,
- GoogleDocstring, NumpyDocstring]:
- instance = docstring_type(docstring)
- if instance.is_valid():
- return instance
-
- return Docstring(docstring)
-
-
-class Docstring(object):
- re_for_parameters_see = re.compile(r"""
- For\s+the\s+(other)?\s*parameters\s*,\s+see
- """, re.X | re.S)
-
- supports_yields = None
- """True if the docstring supports a "yield" section.
-
- False if the docstring uses the returns section to document generators.
- """
-
- # These methods are designed to be overridden
- # pylint: disable=no-self-use
- def __init__(self, doc):
- doc = doc or ""
- self.doc = doc.expandtabs()
-
- def is_valid(self):
- return False
-
- def exceptions(self):
- return set()
-
- def has_params(self):
- return False
-
- def has_returns(self):
- return False
-
- def has_rtype(self):
- return False
-
- def has_yields(self):
- return False
-
- def has_yields_type(self):
- return False
-
- def match_param_docs(self):
- return set(), set()
-
- def params_documented_elsewhere(self):
- return self.re_for_parameters_see.search(self.doc) is not None
-
-
-class SphinxDocstring(Docstring):
- re_type = r"[\w\.]+"
-
- re_xref = r"""
- (?::\w+:)? # optional tag
- `{0}` # what to reference
- """.format(re_type)
-
- re_param_raw = r"""
- : # initial colon
- (?: # Sphinx keywords
- param|parameter|
- arg|argument|
- key|keyword
- )
- \s+ # whitespace
-
- (?: # optional type declaration
- ({type})
- \s+
- )?
-
- (\w+) # Parameter name
- \s* # whitespace
- : # final colon
- """.format(type=re_type)
- re_param_in_docstring = re.compile(re_param_raw, re.X | re.S)
-
- re_type_raw = r"""
- :type # Sphinx keyword
- \s+ # whitespace
- ({type}) # Parameter name
- \s* # whitespace
- : # final colon
- """.format(type=re_type)
- re_type_in_docstring = re.compile(re_type_raw, re.X | re.S)
-
- re_raise_raw = r"""
- : # initial colon
- (?: # Sphinx keyword
- raises?|
- except|exception
- )
- \s+ # whitespace
-
- (?: # type declaration
- ({type})
- \s+
- )?
-
- (\w+) # Parameter name
- \s* # whitespace
- : # final colon
- """.format(type=re_type)
- re_raise_in_docstring = re.compile(re_raise_raw, re.X | re.S)
-
- re_rtype_in_docstring = re.compile(r":rtype:")
-
- re_returns_in_docstring = re.compile(r":returns?:")
-
- supports_yields = False
-
- def is_valid(self):
- return bool(self.re_param_in_docstring.search(self.doc) or
- self.re_raise_in_docstring.search(self.doc) or
- self.re_rtype_in_docstring.search(self.doc) or
- self.re_returns_in_docstring.search(self.doc))
-
- def exceptions(self):
- types = set()
-
- for match in re.finditer(self.re_raise_in_docstring, self.doc):
- raise_type = match.group(2)
- types.add(raise_type)
-
- return types
-
- def has_params(self):
- if not self.doc:
- return False
-
- return self.re_param_in_docstring.search(self.doc) is not None
-
- def has_returns(self):
- if not self.doc:
- return False
-
- return bool(self.re_returns_in_docstring.search(self.doc))
-
- def has_rtype(self):
- if not self.doc:
- return False
-
- return bool(self.re_rtype_in_docstring.search(self.doc))
-
- def match_param_docs(self):
- params_with_doc = set()
- params_with_type = set()
-
- for match in re.finditer(self.re_param_in_docstring, self.doc):
- name = match.group(2)
- params_with_doc.add(name)
- param_type = match.group(1)
- if param_type is not None:
- params_with_type.add(name)
-
- params_with_type.update(re.findall(self.re_type_in_docstring, self.doc))
- return params_with_doc, params_with_type
-
-
-class EpytextDocstring(SphinxDocstring):
- """
- Epytext is similar to Sphinx. See the docs:
- http://epydoc.sourceforge.net/epytext.html
- http://epydoc.sourceforge.net/fields.html#fields
-
- It's used in PyCharm:
- https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314
- https://www.jetbrains.com/help/pycharm/2016.1/using-docstrings-to-specify-types.html
- """
- re_param_in_docstring = re.compile(
- SphinxDocstring.re_param_raw.replace(':', '@', 1),
- re.X | re.S)
-
- re_type_in_docstring = re.compile(
- SphinxDocstring.re_type_raw.replace(':', '@', 1),
- re.X | re.S)
-
- re_raise_in_docstring = re.compile(
- SphinxDocstring.re_raise_raw.replace(':', '@', 1),
- re.X | re.S)
-
- re_rtype_in_docstring = re.compile(r"""
- @ # initial "at" symbol
- (?: # Epytext keyword
- rtype|returntype
- )
- : # final colon
- """, re.X | re.S)
-
- re_returns_in_docstring = re.compile(r"@returns?:")
-
-
-class GoogleDocstring(Docstring):
- re_type = SphinxDocstring.re_type
-
- re_xref = SphinxDocstring.re_xref
-
- re_container_type = r"""
- (?:{type}|{xref}) # a container type
- [\(\[] [^\n]+ [\)\]] # with the contents of the container
- """.format(type=re_type, xref=re_xref)
-
- re_multiple_type = r"""
- (?:{container_type}|{type})
- (?:\s+or\s+(?:{container_type}|{type}))*
- """.format(type=re_type, container_type=re_container_type)
-
- _re_section_template = r"""
- ^([ ]*) {0} \s*: \s*$ # Google parameter header
- ( .* ) # section
- """
-
- re_param_section = re.compile(
- _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
- re.X | re.S | re.M
- )
-
- re_param_line = re.compile(r"""
- \s* \*{{0,2}}(\w+) # identifier potentially with asterisks
- \s* ( [(]
- {type}
- [)] )? \s* : # optional type declaration
- \s* (.*) # beginning of optional description
- """.format(
- type=re_multiple_type,
- ), re.X | re.S | re.M)
-
- re_raise_section = re.compile(
- _re_section_template.format(r"Raises"),
- re.X | re.S | re.M
- )
-
- re_raise_line = re.compile(r"""
- \s* ({type}) \s* : # identifier
- \s* (.*) # beginning of optional description
- """.format(type=re_type), re.X | re.S | re.M)
-
- re_returns_section = re.compile(
- _re_section_template.format(r"Returns?"),
- re.X | re.S | re.M
- )
-
- re_returns_line = re.compile(r"""
- \s* ({type}:)? # identifier
- \s* (.*) # beginning of description
- """.format(
- type=re_multiple_type,
- ), re.X | re.S | re.M)
-
- re_yields_section = re.compile(
- _re_section_template.format(r"Yields?"),
- re.X | re.S | re.M
- )
-
- re_yields_line = re_returns_line
-
- supports_yields = True
-
- def is_valid(self):
- return bool(self.re_param_section.search(self.doc) or
- self.re_raise_section.search(self.doc) or
- self.re_returns_section.search(self.doc) or
- self.re_yields_section.search(self.doc))
-
- def has_params(self):
- if not self.doc:
- return False
-
- return self.re_param_section.search(self.doc) is not None
-
- def has_returns(self):
- if not self.doc:
- return False
-
- entries = self._parse_section(self.re_returns_section)
- for entry in entries:
- match = self.re_returns_line.match(entry)
- if not match:
- continue
-
- return_desc = match.group(2)
- if return_desc:
- return True
-
- return False
-
- def has_rtype(self):
- if not self.doc:
- return False
-
- entries = self._parse_section(self.re_returns_section)
- for entry in entries:
- match = self.re_returns_line.match(entry)
- if not match:
- continue
-
- return_type = match.group(1)
- if return_type:
- return True
-
- return False
-
- def has_yields(self):
- if not self.doc:
- return False
-
- entries = self._parse_section(self.re_yields_section)
- for entry in entries:
- match = self.re_yields_line.match(entry)
- if not match:
- continue
-
- yield_desc = match.group(2)
- if yield_desc:
- return True
-
- return False
-
- def has_yields_type(self):
- if not self.doc:
- return False
-
- entries = self._parse_section(self.re_yields_section)
- for entry in entries:
- match = self.re_yields_line.match(entry)
- if not match:
- continue
-
- yield_type = match.group(1)
- if yield_type:
- return True
-
- return False
-
- def exceptions(self):
- types = set()
-
- entries = self._parse_section(self.re_raise_section)
- for entry in entries:
- match = self.re_raise_line.match(entry)
- if not match:
- continue
-
- exc_type = match.group(1)
- exc_desc = match.group(2)
- if exc_desc:
- types.add(exc_type)
-
- return types
-
- def match_param_docs(self):
- params_with_doc = set()
- params_with_type = set()
-
- entries = self._parse_section(self.re_param_section)
- for entry in entries:
- match = self.re_param_line.match(entry)
- if not match:
- continue
-
- param_name = match.group(1)
- param_type = match.group(2)
- param_desc = match.group(3)
- if param_type:
- params_with_type.add(param_name)
-
- if param_desc:
- params_with_doc.add(param_name)
-
- return params_with_doc, params_with_type
-
- @staticmethod
- def min_section_indent(section_match):
- return len(section_match.group(1)) + 1
-
- def _parse_section(self, section_re):
- section_match = section_re.search(self.doc)
- if section_match is None:
- return []
-
- min_indentation = self.min_section_indent(section_match)
-
- entries = []
- entry = []
- is_first = True
- for line in section_match.group(2).splitlines():
- if not line.strip():
- continue
- indentation = space_indentation(line)
- if indentation < min_indentation:
- break
-
- # The first line after the header defines the minimum
- # indentation.
- if is_first:
- min_indentation = indentation
- is_first = False
-
- if indentation == min_indentation:
- # Lines with minimum indentation must contain the beginning
- # of a new parameter documentation.
- if entry:
- entries.append("\n".join(entry))
- entry = []
-
- entry.append(line)
-
- if entry:
- entries.append("\n".join(entry))
-
- return entries
-
-
-class NumpyDocstring(GoogleDocstring):
- _re_section_template = r"""
- ^([ ]*) {0} \s*?$ # Numpy parameters header
- \s* [-=]+ \s*?$ # underline
- ( .* ) # section
- """
-
- re_param_section = re.compile(
- _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
- re.X | re.S | re.M
- )
-
- re_param_line = re.compile(r"""
- \s* (\w+) # identifier
- \s* :
- \s* (?:({type})(?:,\s+optional)?)? # optional type declaration
- \n # description starts on a new line
- \s* (.*) # description
- """.format(
- type=GoogleDocstring.re_multiple_type,
- ), re.X | re.S)
-
- re_raise_section = re.compile(
- _re_section_template.format(r"Raises"),
- re.X | re.S | re.M
- )
-
- re_raise_line = re.compile(r"""
- \s* ({type})$ # type declaration
- \s* (.*) # optional description
- """.format(type=GoogleDocstring.re_type), re.X | re.S | re.M)
-
- re_returns_section = re.compile(
- _re_section_template.format(r"Returns?"),
- re.X | re.S | re.M
- )
-
- re_returns_line = re.compile(r"""
- \s* ({type})$ # type declaration
- \s* (.*) # optional description
- """.format(
- type=GoogleDocstring.re_multiple_type,
- ), re.X | re.S | re.M)
-
- re_yields_section = re.compile(
- _re_section_template.format(r"Yields?"),
- re.X | re.S | re.M
- )
-
- re_yields_line = re_returns_line
-
- supports_yields = True
-
- @staticmethod
- def min_section_indent(section_match):
- return len(section_match.group(1))
diff --git a/pymode/libs/pylint/extensions/bad_builtin.py b/pymode/libs/pylint/extensions/bad_builtin.py
deleted file mode 100644
index 9876922e..00000000
--- a/pymode/libs/pylint/extensions/bad_builtin.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Checker for deprecated builtins."""
-import sys
-
-import astroid
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import check_messages
-from pylint.interfaces import IAstroidChecker
-
-
-BAD_FUNCTIONS = ['map', 'filter']
-if sys.version_info < (3, 0):
- BAD_FUNCTIONS.append('input')
-# Some hints regarding the use of bad builtins.
-BUILTIN_HINTS = {
- 'map': 'Using a list comprehension can be clearer.',
-}
-BUILTIN_HINTS['filter'] = BUILTIN_HINTS['map']
-
-
-class BadBuiltinChecker(BaseChecker):
-
- __implements__ = (IAstroidChecker, )
- name = 'deprecated_builtins'
- msgs = {'W0141': ('Used builtin function %s',
- 'bad-builtin',
- 'Used when a black listed builtin function is used (see the '
- 'bad-function option). Usual black listed functions are the ones '
- 'like map, or filter , where Python offers now some cleaner '
- 'alternative like list comprehension.'),
- }
-
- options = (('bad-functions',
- {'default' : BAD_FUNCTIONS,
- 'type' :'csv', 'metavar' : '',
- 'help' : 'List of builtins function names that should not be '
- 'used, separated by a comma'}
- ),
- )
-
- @check_messages('bad-builtin')
- def visit_call(self, node):
- if isinstance(node.func, astroid.Name):
- name = node.func.name
- # ignore the name if it's not a builtin (i.e. not defined in the
- # locals nor globals scope)
- if not (name in node.frame() or name in node.root()):
- if name in self.config.bad_functions:
- hint = BUILTIN_HINTS.get(name)
- if hint:
- args = "%r. %s" % (name, hint)
- else:
- args = repr(name)
- self.add_message('bad-builtin', node=node, args=args)
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(BadBuiltinChecker(linter))
diff --git a/pymode/libs/pylint/extensions/check_docs.py b/pymode/libs/pylint/extensions/check_docs.py
deleted file mode 100644
index a01d6fa4..00000000
--- a/pymode/libs/pylint/extensions/check_docs.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2014-2015 Bruno Daniel
-# Copyright (c) 2015-2016 Claudiu Popa
-# Copyright (c) 2016 Ashley Whetter
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-import warnings
-
-from pylint.extensions import docparams
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- warnings.warn("This plugin is deprecated, use pylint.extensions.docparams instead.",
- DeprecationWarning)
- linter.register_checker(docparams.DocstringParameterChecker(linter))
diff --git a/pymode/libs/pylint/extensions/check_elif.py b/pymode/libs/pylint/extensions/check_elif.py
deleted file mode 100644
index 3fbe1c34..00000000
--- a/pymode/libs/pylint/extensions/check_elif.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2015 LOGILAB S.A. (Paris, FRANCE)
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-import astroid
-from pylint.checkers import BaseTokenChecker
-from pylint.checkers.utils import check_messages
-from pylint.interfaces import ITokenChecker, IAstroidChecker
-
-
-class ElseifUsedChecker(BaseTokenChecker):
- """Checks for use of "else if" when a "elif" could be used
- """
-
- __implements__ = (ITokenChecker, IAstroidChecker)
- name = 'else_if_used'
- msgs = {'R5501': ('Consider using "elif" instead of "else if"',
- 'else-if-used',
- 'Used when an else statement is immediately followed by '
- 'an if statement and does not contain statements that '
- 'would be unrelated to it.'),
- }
-
- def __init__(self, linter=None):
- BaseTokenChecker.__init__(self, linter)
- self._init()
-
- def _init(self):
- self._elifs = []
- self._if_counter = 0
-
- def process_tokens(self, tokens):
- # Process tokens and look for 'if' or 'elif'
- for _, token, _, _, _ in tokens:
- if token == 'elif':
- self._elifs.append(True)
- elif token == 'if':
- self._elifs.append(False)
-
- def leave_module(self, _):
- self._init()
-
- def visit_ifexp(self, _):
- self._if_counter += 1
-
- def visit_comprehension(self, node):
- self._if_counter += len(node.ifs)
-
- @check_messages('else-if-used')
- def visit_if(self, node):
- if isinstance(node.parent, astroid.If):
- orelse = node.parent.orelse
- # current if node must directly follow a "else"
- if orelse and orelse == [node]:
- if not self._elifs[self._if_counter]:
- self.add_message('else-if-used', node=node)
- self._if_counter += 1
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(ElseifUsedChecker(linter))
diff --git a/pymode/libs/pylint/extensions/comparetozero.py b/pymode/libs/pylint/extensions/comparetozero.py
deleted file mode 100644
index 00e3eae5..00000000
--- a/pymode/libs/pylint/extensions/comparetozero.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Alexander Todorov
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Looks for comparisons to empty string."""
-
-import itertools
-
-import astroid
-
-from pylint import interfaces
-from pylint import checkers
-from pylint.checkers import utils
-
-
-def _is_constant_zero(node):
- return isinstance(node, astroid.Const) and node.value == 0
-
-
-class CompareToZeroChecker(checkers.BaseChecker):
- """Checks for comparisons to zero.
- Most of the times you should use the fact that integers with a value of 0 are false.
- An exception to this rule is when 0 is allowed in the program and has a
- different meaning than None!
- """
-
- __implements__ = (interfaces.IAstroidChecker,)
-
- # configuration section name
- name = 'compare-to-zero'
- msgs = {'C2001': ('Avoid comparisons to zero',
- 'compare-to-zero',
- 'Used when Pylint detects comparison to a 0 constant.'),
- }
-
- priority = -2
- options = ()
-
- @utils.check_messages('compare-to-zero')
- def visit_compare(self, node):
- _operators = ['!=', '==', 'is not', 'is']
- # note: astroid.Compare has the left most operand in node.left
- # while the rest are a list of tuples in node.ops
- # the format of the tuple is ('compare operator sign', node)
- # here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
- ops.extend(node.ops)
- ops = list(itertools.chain(*ops))
-
- for ops_idx in range(len(ops) - 2):
- op_1 = ops[ops_idx]
- op_2 = ops[ops_idx + 1]
- op_3 = ops[ops_idx + 2]
- error_detected = False
-
- # 0 ?? X
- if _is_constant_zero(op_1) and op_2 in _operators + ['<']:
- error_detected = True
- # X ?? 0
- elif op_2 in _operators + ['>'] and _is_constant_zero(op_3):
- error_detected = True
-
- if error_detected:
- self.add_message('compare-to-zero', node=node)
-
-
-def register(linter):
- """Required method to auto register this checker."""
- linter.register_checker(CompareToZeroChecker(linter))
diff --git a/pymode/libs/pylint/extensions/docparams.py b/pymode/libs/pylint/extensions/docparams.py
deleted file mode 100644
index 43e46351..00000000
--- a/pymode/libs/pylint/extensions/docparams.py
+++ /dev/null
@@ -1,419 +0,0 @@
-# Copyright (c) 2014-2015 Bruno Daniel
-# Copyright (c) 2015-2016 Claudiu Popa
-# Copyright (c) 2016 Ashley Whetter
-# Copyright (c) 2016 Glenn Matthews
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Pylint plugin for checking in Sphinx, Google, or Numpy style docstrings
-"""
-from __future__ import print_function, division, absolute_import
-
-import astroid
-
-from pylint.interfaces import IAstroidChecker
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import node_frame_class
-import pylint.extensions._check_docs_utils as utils
-
-
-class DocstringParameterChecker(BaseChecker):
- """Checker for Sphinx, Google, or Numpy style docstrings
-
- * Check that all function, method and constructor parameters are mentioned
- in the params and types part of the docstring. Constructor parameters
- can be documented in either the class docstring or ``__init__`` docstring,
- but not both.
- * Check that there are no naming inconsistencies between the signature and
- the documentation, i.e. also report documented parameters that are missing
- in the signature. This is important to find cases where parameters are
- renamed only in the code, not in the documentation.
- * Check that all explicitly raised exceptions in a function are documented
- in the function docstring. Caught exceptions are ignored.
-
- Activate this checker by adding the line::
-
- load-plugins=pylint.extensions.docparams
-
- to the ``MASTER`` section of your ``.pylintrc``.
-
- :param linter: linter object
- :type linter: :class:`pylint.lint.PyLinter`
- """
- __implements__ = IAstroidChecker
-
- name = 'parameter_documentation'
- msgs = {
- 'W9005': ('"%s" has constructor parameters documented in class and __init__',
- 'multiple-constructor-doc',
- 'Please remove parameter declarations in the class or constructor.'),
- 'W9006': ('"%s" not documented as being raised',
- 'missing-raises-doc',
- 'Please document exceptions for all raised exception types.'),
- 'W9008': ('Redundant returns documentation',
- 'redundant-returns-doc',
- 'Please remove the return/rtype documentation from this method.'),
- 'W9010': ('Redundant yields documentation',
- 'redundant-yields-doc',
- 'Please remove the yields documentation from this method.'),
- 'W9011': ('Missing return documentation',
- 'missing-return-doc',
- 'Please add documentation about what this method returns.',
- {'old_names': [('W9007', 'missing-returns-doc')]}),
- 'W9012': ('Missing return type documentation',
- 'missing-return-type-doc',
- 'Please document the type returned by this method.',
- # we can't use the same old_name for two different warnings
- # {'old_names': [('W9007', 'missing-returns-doc')]},
- ),
- 'W9013': ('Missing yield documentation',
- 'missing-yield-doc',
- 'Please add documentation about what this generator yields.',
- {'old_names': [('W9009', 'missing-yields-doc')]}),
- 'W9014': ('Missing yield type documentation',
- 'missing-yield-type-doc',
- 'Please document the type yielded by this method.',
- # we can't use the same old_name for two different warnings
- # {'old_names': [('W9009', 'missing-yields-doc')]},
- ),
- 'W9015': ('"%s" missing in parameter documentation',
- 'missing-param-doc',
- 'Please add parameter declarations for all parameters.',
- {'old_names': [('W9003', 'missing-param-doc')]}),
- 'W9016': ('"%s" missing in parameter type documentation',
- 'missing-type-doc',
- 'Please add parameter type declarations for all parameters.',
- {'old_names': [('W9004', 'missing-type-doc')]}),
- 'W9017': ('"%s" differing in parameter documentation',
- 'differing-param-doc',
- 'Please check parameter names in declarations.',
- ),
- 'W9018': ('"%s" differing in parameter type documentation',
- 'differing-type-doc',
- 'Please check parameter names in type declarations.',
- ),
- }
-
- options = (('accept-no-param-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '',
- 'help': 'Whether to accept totally missing parameter '
- 'documentation in the docstring of a function that has '
- 'parameters.'
- }),
- ('accept-no-raise-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '',
- 'help': 'Whether to accept totally missing raises '
- 'documentation in the docstring of a function that '
- 'raises an exception.'
- }),
- ('accept-no-return-doc',
- {'default': True, 'type' : 'yn', 'metavar' : '',
- 'help': 'Whether to accept totally missing return '
- 'documentation in the docstring of a function that '
- 'returns a statement.'
- }),
- ('accept-no-yields-doc',
- {'default': True, 'type' : 'yn', 'metavar': '',
- 'help': 'Whether to accept totally missing yields '
- 'documentation in the docstring of a generator.'
- }),
- )
-
- priority = -2
-
- constructor_names = {'__init__', '__new__'}
- not_needed_param_in_docstring = {'self', 'cls'}
-
- def visit_functiondef(self, node):
- """Called for function and method definitions (def).
-
- :param node: Node for a function or method definition in the AST
- :type node: :class:`astroid.scoped_nodes.Function`
- """
- node_doc = utils.docstringify(node.doc)
- self.check_functiondef_params(node, node_doc)
- self.check_functiondef_returns(node, node_doc)
- self.check_functiondef_yields(node, node_doc)
-
- def check_functiondef_params(self, node, node_doc):
- node_allow_no_param = None
- if node.name in self.constructor_names:
- class_node = node_frame_class(node)
- if class_node is not None:
- class_doc = utils.docstringify(class_node.doc)
- self.check_single_constructor_params(class_doc, node_doc, class_node)
-
- # __init__ or class docstrings can have no parameters documented
- # as long as the other documents them.
- node_allow_no_param = (
- class_doc.has_params() or
- class_doc.params_documented_elsewhere() or
- None
- )
- class_allow_no_param = (
- node_doc.has_params() or
- node_doc.params_documented_elsewhere() or
- None
- )
-
- self.check_arguments_in_docstring(
- class_doc, node.args, class_node, class_allow_no_param)
-
- self.check_arguments_in_docstring(
- node_doc, node.args, node, node_allow_no_param)
-
- def check_functiondef_returns(self, node, node_doc):
- if not node_doc.supports_yields and node.is_generator():
- return
-
- return_nodes = node.nodes_of_class(astroid.Return)
- if ((node_doc.has_returns() or node_doc.has_rtype()) and
- not any(utils.returns_something(ret_node) for ret_node in return_nodes)):
- self.add_message(
- 'redundant-returns-doc',
- node=node)
-
- def check_functiondef_yields(self, node, node_doc):
- if not node_doc.supports_yields:
- return
-
- if ((node_doc.has_yields() or node_doc.has_yields_type()) and
- not node.is_generator()):
- self.add_message(
- 'redundant-yields-doc',
- node=node)
-
- def visit_raise(self, node):
- func_node = node.frame()
- if not isinstance(func_node, astroid.FunctionDef):
- return
-
- expected_excs = utils.possible_exc_types(node)
- if not expected_excs:
- return
-
- doc = utils.docstringify(func_node.doc)
- if not doc.is_valid():
- if doc.doc:
- self._handle_no_raise_doc(expected_excs, func_node)
- return
-
- found_excs = doc.exceptions()
- missing_excs = expected_excs - found_excs
- self._add_raise_message(missing_excs, func_node)
-
- def visit_return(self, node):
- if not utils.returns_something(node):
- return
-
- func_node = node.frame()
- if not isinstance(func_node, astroid.FunctionDef):
- return
-
- doc = utils.docstringify(func_node.doc)
- if not doc.is_valid() and self.config.accept_no_return_doc:
- return
-
- if not doc.has_returns():
- self.add_message(
- 'missing-return-doc',
- node=func_node
- )
-
- if not doc.has_rtype():
- self.add_message(
- 'missing-return-type-doc',
- node=func_node
- )
-
- def visit_yield(self, node):
- func_node = node.frame()
- if not isinstance(func_node, astroid.FunctionDef):
- return
-
- doc = utils.docstringify(func_node.doc)
- if not doc.is_valid() and self.config.accept_no_yields_doc:
- return
-
- if doc.supports_yields:
- doc_has_yields = doc.has_yields()
- doc_has_yields_type = doc.has_yields_type()
- else:
- doc_has_yields = doc.has_returns()
- doc_has_yields_type = doc.has_rtype()
-
- if not doc_has_yields:
- self.add_message(
- 'missing-yield-doc',
- node=func_node
- )
-
- if not doc_has_yields_type:
- self.add_message(
- 'missing-yield-type-doc',
- node=func_node
- )
-
- def visit_yieldfrom(self, node):
- self.visit_yield(node)
-
- def check_arguments_in_docstring(self, doc, arguments_node, warning_node,
- accept_no_param_doc=None):
- """Check that all parameters in a function, method or class constructor
- on the one hand and the parameters mentioned in the parameter
- documentation (e.g. the Sphinx tags 'param' and 'type') on the other
- hand are consistent with each other.
-
- * Undocumented parameters except 'self' are noticed.
- * Undocumented parameter types except for 'self' and the ``*``
- and ``**`` parameters are noticed.
- * Parameters mentioned in the parameter documentation that don't or no
- longer exist in the function parameter list are noticed.
- * If the text "For the parameters, see" or "For the other parameters,
- see" (ignoring additional whitespace) is mentioned in the docstring,
- missing parameter documentation is tolerated.
- * If there's no Sphinx style, Google style or NumPy style parameter
- documentation at all, i.e. ``:param`` is never mentioned etc., the
- checker assumes that the parameters are documented in another format
- and the absence is tolerated.
-
- :param doc: Docstring for the function, method or class.
- :type doc: str
-
- :param arguments_node: Arguments node for the function, method or
- class constructor.
- :type arguments_node: :class:`astroid.scoped_nodes.Arguments`
-
- :param warning_node: The node to assign the warnings to
- :type warning_node: :class:`astroid.scoped_nodes.Node`
-
- :param accept_no_param_doc: Whether or not to allow no parameters
- to be documented.
- If None then this value is read from the configuration.
- :type accept_no_param_doc: bool or None
- """
- # Tolerate missing param or type declarations if there is a link to
- # another method carrying the same name.
- if not doc.doc:
- return
-
- if accept_no_param_doc is None:
- accept_no_param_doc = self.config.accept_no_param_doc
- tolerate_missing_params = doc.params_documented_elsewhere()
-
- # Collect the function arguments.
- expected_argument_names = set(arg.name for arg in arguments_node.args)
- expected_argument_names.update(arg.name for arg in arguments_node.kwonlyargs)
- not_needed_type_in_docstring = (
- self.not_needed_param_in_docstring.copy())
-
- if arguments_node.vararg is not None:
- expected_argument_names.add(arguments_node.vararg)
- not_needed_type_in_docstring.add(arguments_node.vararg)
- if arguments_node.kwarg is not None:
- expected_argument_names.add(arguments_node.kwarg)
- not_needed_type_in_docstring.add(arguments_node.kwarg)
- params_with_doc, params_with_type = doc.match_param_docs()
-
- # Tolerate no parameter documentation at all.
- if (not params_with_doc and not params_with_type
- and accept_no_param_doc):
- tolerate_missing_params = True
-
- def _compare_missing_args(found_argument_names, message_id,
- not_needed_names):
- """Compare the found argument names with the expected ones and
- generate a message if there are arguments missing.
-
- :param set found_argument_names: argument names found in the
- docstring
-
- :param str message_id: pylint message id
-
- :param not_needed_names: names that may be omitted
- :type not_needed_names: set of str
- """
- if not tolerate_missing_params:
- missing_argument_names = (
- (expected_argument_names - found_argument_names)
- - not_needed_names)
- if missing_argument_names:
- self.add_message(
- message_id,
- args=(', '.join(
- sorted(missing_argument_names)),),
- node=warning_node)
-
- def _compare_different_args(found_argument_names, message_id,
- not_needed_names):
- """Compare the found argument names with the expected ones and
- generate a message if there are extra arguments found.
-
- :param set found_argument_names: argument names found in the
- docstring
-
- :param str message_id: pylint message id
-
- :param not_needed_names: names that may be omitted
- :type not_needed_names: set of str
- """
- differing_argument_names = (
- (expected_argument_names ^ found_argument_names)
- - not_needed_names - expected_argument_names)
-
- if differing_argument_names:
- self.add_message(
- message_id,
- args=(', '.join(
- sorted(differing_argument_names)),),
- node=warning_node)
-
- _compare_missing_args(params_with_doc, 'missing-param-doc',
- self.not_needed_param_in_docstring)
- _compare_missing_args(params_with_type, 'missing-type-doc',
- not_needed_type_in_docstring)
-
- _compare_different_args(params_with_doc, 'differing-param-doc',
- self.not_needed_param_in_docstring)
- _compare_different_args(params_with_type, 'differing-type-doc',
- not_needed_type_in_docstring)
-
- def check_single_constructor_params(self, class_doc, init_doc, class_node):
- if class_doc.has_params() and init_doc.has_params():
- self.add_message(
- 'multiple-constructor-doc',
- args=(class_node.name,),
- node=class_node)
-
- def _handle_no_raise_doc(self, excs, node):
- if self.config.accept_no_raise_doc:
- return
-
- self._add_raise_message(excs, node)
-
- def _add_raise_message(self, missing_excs, node):
- """
- Adds a message on :param:`node` for the missing exception type.
-
- :param missing_excs: A list of missing exception types.
- :type missing_excs: list
-
- :param node: The node show the message on.
- :type node: astroid.node_classes.NodeNG
- """
- if not missing_excs:
- return
-
- self.add_message(
- 'missing-raises-doc',
- args=(', '.join(sorted(missing_excs)),),
- node=node)
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(DocstringParameterChecker(linter))
diff --git a/pymode/libs/pylint/extensions/docstyle.py b/pymode/libs/pylint/extensions/docstyle.py
deleted file mode 100644
index 12a58d09..00000000
--- a/pymode/libs/pylint/extensions/docstyle.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2016 Luis Escobar
-# Copyright (c) 2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-import linecache
-
-from pylint import checkers
-from pylint.interfaces import IAstroidChecker, HIGH
-from pylint.checkers.utils import check_messages
-
-
-class DocStringStyleChecker(checkers.BaseChecker):
- """Checks format of docstrings based on PEP 0257"""
-
- __implements__ = IAstroidChecker
- name = 'docstyle'
-
- msgs = {
- 'C0198': ('Bad docstring quotes in %s, expected """, given %s',
- 'bad-docstring-quotes',
- 'Used when a docstring does not have triple double quotes.'),
- 'C0199': ('First line empty in %s docstring',
- 'docstring-first-line-empty',
- 'Used when a blank line is found at the beginning of a docstring.'),
- }
-
- @check_messages('docstring-first-line-empty', 'bad-docstring-quotes')
- def visit_module(self, node):
- self._check_docstring('module', node)
-
- def visit_classdef(self, node):
- self._check_docstring('class', node)
-
- def visit_functiondef(self, node):
- ftype = 'method' if node.is_method() else 'function'
- self._check_docstring(ftype, node)
-
- visit_asyncfunctiondef = visit_functiondef
-
- def _check_docstring(self, node_type, node):
- docstring = node.doc
- if docstring and docstring[0] == '\n':
- self.add_message('docstring-first-line-empty', node=node,
- args=(node_type,), confidence=HIGH)
-
- # Use "linecache", instead of node.as_string(), because the latter
- # looses the original form of the docstrings.
-
- if docstring:
- lineno = node.fromlineno + 1
- line = linecache.getline(node.root().file, lineno).lstrip()
- if line and line.find('"""') == 0:
- return
- if line and '\'\'\'' in line:
- quotes = '\'\'\''
- elif line and line[0] == '"':
- quotes = '"'
- elif line and line[0] == '\'':
- quotes = '\''
- else:
- quotes = False
- if quotes:
- self.add_message('bad-docstring-quotes', node=node,
- args=(node_type, quotes), confidence=HIGH)
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(DocStringStyleChecker(linter))
diff --git a/pymode/libs/pylint/extensions/emptystring.py b/pymode/libs/pylint/extensions/emptystring.py
deleted file mode 100644
index d3e05e9d..00000000
--- a/pymode/libs/pylint/extensions/emptystring.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Alexander Todorov
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Looks for comparisons to empty string."""
-
-import itertools
-
-import astroid
-
-from pylint import interfaces
-from pylint import checkers
-from pylint.checkers import utils
-
-
-def _is_constant_empty_str(node):
- return isinstance(node, astroid.Const) and node.value == ''
-
-
-class CompareToEmptyStringChecker(checkers.BaseChecker):
- """Checks for comparisons to empty string.
- Most of the times you should use the fact that empty strings are false.
- An exception to this rule is when an empty string value is allowed in the program
- and has a different meaning than None!
- """
-
- __implements__ = (interfaces.IAstroidChecker,)
-
- # configuration section name
- name = 'compare-to-empty-string'
- msgs = {'C1901': ('Avoid comparisons to empty string',
- 'compare-to-empty-string',
- 'Used when Pylint detects comparison to an empty string constant.'),
- }
-
- priority = -2
- options = ()
-
- @utils.check_messages('compare-to-empty-string')
- def visit_compare(self, node):
- _operators = ['!=', '==', 'is not', 'is']
- # note: astroid.Compare has the left most operand in node.left
- # while the rest are a list of tuples in node.ops
- # the format of the tuple is ('compare operator sign', node)
- # here we squash everything into `ops` to make it easier for processing later
- ops = [('', node.left)]
- ops.extend(node.ops)
- ops = list(itertools.chain(*ops))
-
- for ops_idx in range(len(ops) - 2):
- op_1 = ops[ops_idx]
- op_2 = ops[ops_idx + 1]
- op_3 = ops[ops_idx + 2]
- error_detected = False
-
- # x ?? ""
- if _is_constant_empty_str(op_1) and op_2 in _operators:
- error_detected = True
- # '' ?? X
- elif op_2 in _operators and _is_constant_empty_str(op_3):
- error_detected = True
-
- if error_detected:
- self.add_message('compare-to-empty-string', node=node)
-
-
-def register(linter):
- """Required method to auto register this checker."""
- linter.register_checker(CompareToEmptyStringChecker(linter))
diff --git a/pymode/libs/pylint/extensions/mccabe.py b/pymode/libs/pylint/extensions/mccabe.py
deleted file mode 100644
index 8e231ccd..00000000
--- a/pymode/libs/pylint/extensions/mccabe.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Module to add McCabe checker class for pylint. """
-
-from __future__ import absolute_import
-
-from mccabe import PathGraph as Mccabe_PathGraph, \
- PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor
-from pylint import checkers
-from pylint.checkers.utils import check_messages
-from pylint.interfaces import HIGH, IAstroidChecker
-
-
-class PathGraph(Mccabe_PathGraph):
- def __init__(self, node):
- super(PathGraph, self).__init__(name='', entity='', lineno=1)
- self.root = node
-
-
-class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
- def __init__(self):
- super(PathGraphingAstVisitor, self).__init__()
- self._bottom_counter = 0
-
- def default(self, node, *args):
- for child in node.get_children():
- self.dispatch(child, *args)
-
- def dispatch(self, node, *args):
- self.node = node
- klass = node.__class__
- meth = self._cache.get(klass)
- if meth is None:
- className = klass.__name__
- meth = getattr(self.visitor, 'visit' + className, self.default)
- self._cache[klass] = meth
- return meth(node, *args)
-
- def visitFunctionDef(self, node):
- if self.graph is not None:
- # closure
- pathnode = self._append_node(node)
- self.tail = pathnode
- self.dispatch_list(node.body)
- bottom = "%s" % self._bottom_counter
- self._bottom_counter += 1
- self.graph.connect(self.tail, bottom)
- self.graph.connect(node, bottom)
- self.tail = bottom
- else:
- self.graph = PathGraph(node)
- self.tail = node
- self.dispatch_list(node.body)
- self.graphs["%s%s" % (self.classname, node.name)] = self.graph
- self.reset()
-
- visitAsyncFunctionDef = visitFunctionDef
-
- def visitSimpleStatement(self, node):
- self._append_node(node)
-
- visitAssert = visitAssign = visitAugAssign = visitDelete = visitPrint = \
- visitRaise = visitYield = visitImport = visitCall = visitSubscript = \
- visitPass = visitContinue = visitBreak = visitGlobal = visitReturn = \
- visitExpr = visitAwait = visitSimpleStatement
-
- def visitWith(self, node):
- self._append_node(node)
- self.dispatch_list(node.body)
-
- visitAsyncWith = visitWith
-
- def _append_node(self, node):
- if not self.tail:
- return
- self.graph.connect(self.tail, node)
- self.tail = node
- return node
-
- def _subgraph(self, node, name, extra_blocks=()):
- """create the subgraphs representing any `if` and `for` statements"""
- if self.graph is None:
- # global loop
- self.graph = PathGraph(node)
- self._subgraph_parse(node, node, extra_blocks)
- self.graphs["%s%s" % (self.classname, name)] = self.graph
- self.reset()
- else:
- self._append_node(node)
- self._subgraph_parse(node, node, extra_blocks)
-
- def _subgraph_parse(self, node, pathnode, extra_blocks): # pylint: disable=unused-argument
- """parse the body and any `else` block of `if` and `for` statements"""
- loose_ends = []
- self.tail = node
- self.dispatch_list(node.body)
- loose_ends.append(self.tail)
- for extra in extra_blocks:
- self.tail = node
- self.dispatch_list(extra.body)
- loose_ends.append(self.tail)
- if node.orelse:
- self.tail = node
- self.dispatch_list(node.orelse)
- loose_ends.append(self.tail)
- else:
- loose_ends.append(node)
- if node:
- bottom = "%s" % self._bottom_counter
- self._bottom_counter += 1
- for le in loose_ends:
- self.graph.connect(le, bottom)
- self.tail = bottom
-
-
-class McCabeMethodChecker(checkers.BaseChecker):
- """Checks McCabe complexity cyclomatic threshold in methods and functions
- to validate a too complex code.
- """
-
- __implements__ = IAstroidChecker
- name = 'design'
-
- msgs = {
- 'R1260': (
- "%s is too complex. The McCabe rating is %d",
- 'too-complex',
- 'Used when a method or function is too complex based on '
- 'McCabe Complexity Cyclomatic'),
- }
- options = (
- ('max-complexity', {
- 'default': 10,
- 'type': 'int',
- 'metavar': '',
- 'help': 'McCabe complexity cyclomatic threshold',
- }),
- )
-
- @check_messages('too-complex')
- def visit_module(self, node):
- """visit an astroid.Module node to check too complex rating and
- add message if is greather than max_complexity stored from options"""
- visitor = PathGraphingAstVisitor()
- for child in node.body:
- visitor.preorder(child, visitor)
- for graph in visitor.graphs.values():
- complexity = graph.complexity()
- node = graph.root
- if hasattr(node, 'name'):
- node_name = "'%s'" % node.name
- else:
- node_name = "This '%s'" % node.__class__.__name__.lower()
- if complexity <= self.config.max_complexity:
- continue
- self.add_message(
- 'too-complex', node=node, confidence=HIGH,
- args=(node_name, complexity))
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(McCabeMethodChecker(linter))
diff --git a/pymode/libs/pylint/extensions/overlapping_exceptions.py b/pymode/libs/pylint/extensions/overlapping_exceptions.py
deleted file mode 100644
index c231ba35..00000000
--- a/pymode/libs/pylint/extensions/overlapping_exceptions.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Looks for overlapping exceptions."""
-
-import astroid
-
-from pylint import interfaces
-from pylint import checkers
-from pylint.checkers import utils
-
-from pylint.checkers.exceptions import _annotated_unpack_infer
-
-
-class OverlappingExceptionsChecker(checkers.BaseChecker):
- """Checks for two or more exceptions in the same exception handler
- clause that are identical or parts of the same inheritance hierarchy
- (i.e. overlapping)."""
-
- __implements__ = interfaces.IAstroidChecker
-
- name = 'overlap-except'
- msgs = {'W0714': ('Overlapping exceptions (%s)',
- 'overlapping-except',
- 'Used when exceptions in handler overlap or are identical')}
- priority = -2
- options = ()
-
- @utils.check_messages('overlapping-except')
- def visit_tryexcept(self, node):
- """check for empty except"""
- for handler in node.handlers:
- if handler.type is None:
- continue
- if isinstance(handler.type, astroid.BoolOp):
- continue
- try:
- excs = list(_annotated_unpack_infer(handler.type))
- except astroid.InferenceError:
- continue
-
- handled_in_clause = []
- for part, exc in excs:
- if exc is astroid.YES:
- continue
- if (isinstance(exc, astroid.Instance) and
- utils.inherit_from_std_ex(exc)):
- # pylint: disable=protected-access
- exc = exc._proxied
-
- if not isinstance(exc, astroid.ClassDef):
- continue
-
- exc_ancestors = [anc for anc in exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
-
- for prev_part, prev_exc in handled_in_clause:
- prev_exc_ancestors = [anc for anc in prev_exc.ancestors()
- if isinstance(anc, astroid.ClassDef)]
- if exc == prev_exc:
- self.add_message('overlapping-except',
- node=handler.type,
- args='%s and %s are the same' %
- (prev_part.as_string(),
- part.as_string()))
- elif (prev_exc in exc_ancestors or
- exc in prev_exc_ancestors):
- ancestor = part if exc in prev_exc_ancestors else prev_part
- descendant = part if prev_exc in exc_ancestors else prev_part
- self.add_message('overlapping-except',
- node=handler.type,
- args='%s is an ancestor class of %s' %
- (ancestor.as_string(), descendant.as_string()))
- handled_in_clause += [(part, exc)]
-
-
-def register(linter):
- """Required method to auto register this checker."""
- linter.register_checker(OverlappingExceptionsChecker(linter))
diff --git a/pymode/libs/pylint/extensions/redefined_variable_type.py b/pymode/libs/pylint/extensions/redefined_variable_type.py
deleted file mode 100644
index 6b6abb83..00000000
--- a/pymode/libs/pylint/extensions/redefined_variable_type.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-import six
-
-import astroid
-from pylint.checkers import BaseChecker
-from pylint.checkers.utils import check_messages, is_none, node_type
-from pylint.interfaces import IAstroidChecker
-
-
-BUILTINS = six.moves.builtins.__name__
-
-
-class MultipleTypesChecker(BaseChecker):
- """Checks for variable type redefinitions (NoneType excepted)
-
- At a function, method, class or module scope
-
- This rule could be improved:
-
- - Currently, if an attribute is set to different types in 2 methods of a
- same class, it won't be detected (see functional test)
- - One could improve the support for inference on assignment with tuples,
- ifexpr, etc. Also it would be great to have support for inference on
- str.split()
- """
- __implements__ = IAstroidChecker
-
- name = 'multiple_types'
- msgs = {'R0204': ('Redefinition of %s type from %s to %s',
- 'redefined-variable-type',
- 'Used when the type of a variable changes inside a '
- 'method or a function.'
- ),
- }
-
- def visit_classdef(self, _):
- self._assigns.append({})
-
- @check_messages('redefined-variable-type')
- def leave_classdef(self, _):
- self._check_and_add_messages()
-
- visit_functiondef = visit_classdef
- leave_functiondef = leave_module = leave_classdef
-
- def visit_module(self, _):
- self._assigns = [{}]
-
- def _check_and_add_messages(self):
- assigns = self._assigns.pop()
- for name, args in assigns.items():
- if len(args) <= 1:
- continue
- orig_node, orig_type = args[0]
- # Check if there is a type in the following nodes that would be
- # different from orig_type.
- for redef_node, redef_type in args[1:]:
- if redef_type == orig_type:
- continue
- # if a variable is defined to several types in a if node,
- # this is not actually redefining.
- orig_parent = orig_node.parent
- redef_parent = redef_node.parent
- if isinstance(orig_parent, astroid.If):
- if orig_parent == redef_parent:
- if (redef_node in orig_parent.orelse and
- orig_node not in orig_parent.orelse):
- orig_node, orig_type = redef_node, redef_type
- continue
- elif (isinstance(redef_parent, astroid.If) and
- redef_parent in orig_parent.nodes_of_class(astroid.If)):
- orig_node, orig_type = redef_node, redef_type
- continue
- orig_type = orig_type.replace(BUILTINS + ".", '')
- redef_type = redef_type.replace(BUILTINS + ".", '')
- self.add_message('redefined-variable-type', node=redef_node,
- args=(name, orig_type, redef_type))
- break
-
- def visit_assign(self, node):
- # we don't handle multiple assignment nor slice assignment
- target = node.targets[0]
- if isinstance(target, (astroid.Tuple, astroid.Subscript)):
- return
- # ignore NoneType
- if is_none(node):
- return
- _type = node_type(node.value)
- if _type:
- self._assigns[-1].setdefault(target.as_string(), []).append(
- (node, _type.pytype()))
-
-
-def register(linter):
- """Required method to auto register this checker.
-
- :param linter: Main interface object for Pylint plugins
- :type linter: Pylint object
- """
- linter.register_checker(MultipleTypesChecker(linter))
diff --git a/pymode/libs/pylint/graph.py b/pymode/libs/pylint/graph.py
deleted file mode 100644
index f2c2c5bb..00000000
--- a/pymode/libs/pylint/graph.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright (c) 2015 Florian Bruhin
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Graph manipulation utilities.
-
-(dot generation adapted from pypy/translator/tool/make_dot.py)
-"""
-
-import os.path as osp
-import os
-import sys
-import tempfile
-import codecs
-
-def target_info_from_filename(filename):
- """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png')."""
- basename = osp.basename(filename)
- storedir = osp.dirname(osp.abspath(filename))
- target = filename.split('.')[-1]
- return storedir, basename, target
-
-
-class DotBackend(object):
- """Dot File backend."""
- def __init__(self, graphname, rankdir=None, size=None, ratio=None,
- charset='utf-8', renderer='dot', additional_param=None):
- if additional_param is None:
- additional_param = {}
- self.graphname = graphname
- self.renderer = renderer
- self.lines = []
- self._source = None
- self.emit("digraph %s {" % normalize_node_id(graphname))
- if rankdir:
- self.emit('rankdir=%s' % rankdir)
- if ratio:
- self.emit('ratio=%s' % ratio)
- if size:
- self.emit('size="%s"' % size)
- if charset:
- assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \
- 'unsupported charset %s' % charset
- self.emit('charset="%s"' % charset)
- for param in additional_param.items():
- self.emit('='.join(param))
-
- def get_source(self):
- """returns self._source"""
- if self._source is None:
- self.emit("}\n")
- self._source = '\n'.join(self.lines)
- del self.lines
- return self._source
-
- source = property(get_source)
-
- def generate(self, outputfile=None, dotfile=None, mapfile=None):
- """Generates a graph file.
-
- :param str outputfile: filename and path [defaults to graphname.png]
- :param str dotfile: filename and path [defaults to graphname.dot]
- :param str mapfile: filename and path
-
- :rtype: str
- :return: a path to the generated file
- """
- import subprocess # introduced in py 2.4
- name = self.graphname
- if not dotfile:
- # if 'outputfile' is a dot file use it as 'dotfile'
- if outputfile and outputfile.endswith(".dot"):
- dotfile = outputfile
- else:
- dotfile = '%s.dot' % name
- if outputfile is not None:
- storedir, _, target = target_info_from_filename(outputfile)
- if target != "dot":
- pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
- os.close(pdot)
- else:
- dot_sourcepath = osp.join(storedir, dotfile)
- else:
- target = 'png'
- pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
- ppng, outputfile = tempfile.mkstemp(".png", name)
- os.close(pdot)
- os.close(ppng)
- pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8')
- pdot.write(self.source)
- pdot.close()
- if target != 'dot':
- use_shell = sys.platform == 'win32'
- if mapfile:
- subprocess.call([self.renderer, '-Tcmapx', '-o',
- mapfile, '-T', target, dot_sourcepath,
- '-o', outputfile],
- shell=use_shell)
- else:
- subprocess.call([self.renderer, '-T', target,
- dot_sourcepath, '-o', outputfile],
- shell=use_shell)
- os.unlink(dot_sourcepath)
- return outputfile
-
- def emit(self, line):
- """Adds to final output."""
- self.lines.append(line)
-
- def emit_edge(self, name1, name2, **props):
- """emit an edge from to .
- edge properties: see http://www.graphviz.org/doc/info/attrs.html
- """
- attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
- n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
- self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))))
-
- def emit_node(self, name, **props):
- """emit a node with given properties.
- node properties: see http://www.graphviz.org/doc/info/attrs.html
- """
- attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
- self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs))))
-
-def normalize_node_id(nid):
- """Returns a suitable DOT node id for `nid`."""
- return '"%s"' % nid
-
-def get_cycles(graph_dict, vertices=None):
- '''given a dictionary representing an ordered graph (i.e. key are vertices
- and values is a list of destination vertices representing edges), return a
- list of detected cycles
- '''
- if not graph_dict:
- return ()
- result = []
- if vertices is None:
- vertices = graph_dict.keys()
- for vertice in vertices:
- _get_cycles(graph_dict, [], set(), result, vertice)
- return result
-
-def _get_cycles(graph_dict, path, visited, result, vertice):
- """recursive function doing the real work for get_cycles"""
- if vertice in path:
- cycle = [vertice]
- for node in path[::-1]:
- if node == vertice:
- break
- cycle.insert(0, node)
- # make a canonical representation
- start_from = min(cycle)
- index = cycle.index(start_from)
- cycle = cycle[index:] + cycle[0:index]
- # append it to result if not already in
- if cycle not in result:
- result.append(cycle)
- return
- path.append(vertice)
- try:
- for node in graph_dict[vertice]:
- # don't check already visited nodes again
- if node not in visited:
- _get_cycles(graph_dict, path, visited, result, node)
- visited.add(node)
- except KeyError:
- pass
- path.pop()
diff --git a/pymode/libs/pylint/interfaces.py b/pymode/libs/pylint/interfaces.py
deleted file mode 100644
index 5403818a..00000000
--- a/pymode/libs/pylint/interfaces.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (c) 2009-2010, 2012-2013 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2015 Florian Bruhin
-# Copyright (c) 2015-2016 Claudiu Popa
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-"""Interfaces for Pylint objects"""
-from collections import namedtuple
-
-Confidence = namedtuple('Confidence', ['name', 'description'])
-# Warning Certainties
-HIGH = Confidence('HIGH', 'No false positive possible.')
-INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
-INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
- 'Warning based on inference with failures.')
-UNDEFINED = Confidence('UNDEFINED',
- 'Warning without any associated confidence level.')
-
-CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
-
-
-class Interface(object):
- """Base class for interfaces."""
- @classmethod
- def is_implemented_by(cls, instance):
- return implements(instance, cls)
-
-
-def implements(obj, interface):
- """Return true if the give object (maybe an instance or class) implements
- the interface.
- """
- kimplements = getattr(obj, '__implements__', ())
- if not isinstance(kimplements, (list, tuple)):
- kimplements = (kimplements,)
- for implementedinterface in kimplements:
- if issubclass(implementedinterface, interface):
- return True
- return False
-
-
-class IChecker(Interface):
- """This is an base interface, not designed to be used elsewhere than for
- sub interfaces definition.
- """
-
- def open(self):
- """called before visiting project (i.e set of modules)"""
-
- def close(self):
- """called after visiting project (i.e set of modules)"""
-
-
-class IRawChecker(IChecker):
- """interface for checker which need to parse the raw file
- """
-
- def process_module(self, astroid):
- """ process a module
-
- the module's content is accessible via astroid.stream
- """
-
-
-class ITokenChecker(IChecker):
- """Interface for checkers that need access to the token list."""
- def process_tokens(self, tokens):
- """Process a module.
-
- tokens is a list of all source code tokens in the file.
- """
-
-
-class IAstroidChecker(IChecker):
- """ interface for checker which prefers receive events according to
- statement type
- """
-
-
-class IReporter(Interface):
- """ reporter collect messages and display results encapsulated in a layout
- """
-
- def handle_message(self, msg):
- """Handle the given message object."""
-
- def display_reports(self, layout):
- """display results encapsulated in the layout tree
- """
-
-
-__all__ = ('IRawChecker', 'IAstroidChecker', 'ITokenChecker', 'IReporter')
diff --git a/pymode/libs/pylint/lint.py b/pymode/libs/pylint/lint.py
deleted file mode 100644
index 537d6f25..00000000
--- a/pymode/libs/pylint/lint.py
+++ /dev/null
@@ -1,1365 +0,0 @@
-# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE)
-# Copyright (c) 2011-2014 Google, Inc.
-# Copyright (c) 2012 FELD Boris
-# Copyright (c) 2014-2016 Claudiu Popa
-# Copyright (c) 2014-2015 Michal Nowikowski
-# Copyright (c) 2015 Mihai Balint
-# Copyright (c) 2015 Simu Toni
-# Copyright (c) 2015 Aru Sahni
-# Copyright (c) 2015-2016 Florian Bruhin
-# Copyright (c) 2016 Glenn Matthews
-
-# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
-# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
-
-""" %prog [options] module_or_package
-
- Check that a module satisfies a coding standard (and more !).
-
- %prog --help
-
- Display this help message and exit.
-
- %prog --help-msg [,]
-
- Display help messages about given message identifiers and exit.
-"""
-from __future__ import print_function
-
-import collections
-import contextlib
-import operator
-import os
-try:
- import multiprocessing
-except ImportError:
- multiprocessing = None
-import sys
-import tokenize
-import warnings
-
-import six
-
-import astroid
-from astroid.__pkginfo__ import version as astroid_version
-from astroid import modutils
-from pylint import checkers
-from pylint import interfaces
-from pylint import reporters
-from pylint import exceptions
-from pylint import utils
-from pylint import config
-from pylint.__pkginfo__ import version
-from pylint.reporters.ureports import nodes as report_nodes
-
-
-MANAGER = astroid.MANAGER
-
-
-def _get_new_args(message):
- location = (
- message.abspath,
- message.path,
- message.module,
- message.obj,
- message.line,
- message.column,
- )
- return (
- message.msg_id,
- message.symbol,
- location,
- message.msg,
- message.confidence,
- )
-
-def _get_python_path(filepath):
- dirname = os.path.realpath(os.path.expanduser(filepath))
- if not os.path.isdir(dirname):
- dirname = os.path.dirname(dirname)
- while True:
- if not os.path.exists(os.path.join(dirname, "__init__.py")):
- return dirname
- old_dirname = dirname
- dirname = os.path.dirname(dirname)
- if old_dirname == dirname:
- return os.getcwd()
-
-
-def _merge_stats(stats):
- merged = {}
- by_msg = collections.Counter()
- for stat in stats:
- message_stats = stat.pop('by_msg', {})
- by_msg.update(message_stats)
-
- for key, item in six.iteritems(stat):
- if key not in merged:
- merged[key] = item
- else:
- if isinstance(item, dict):
- merged[key].update(item)
- else:
- merged[key] = merged[key] + item
-
- merged['by_msg'] = by_msg
- return merged
-
-
-@contextlib.contextmanager
-def _patch_sysmodules():
- # Context manager that permits running pylint, on Windows, with -m switch
- # and with --jobs, as in 'python -2 -m pylint .. --jobs'.
- # For more details why this is needed,
- # see Python issue http://bugs.python.org/issue10845.
-
- mock_main = __name__ != '__main__' # -m switch
- if mock_main:
- sys.modules['__main__'] = sys.modules[__name__]
-
- try:
- yield
- finally:
- if mock_main:
- sys.modules.pop('__main__')
-
-
-# Python Linter class #########################################################
-
-MSGS = {
- 'F0001': ('%s',
- 'fatal',
- 'Used when an error occurred preventing the analysis of a \
- module (unable to find it for instance).'),
- 'F0002': ('%s: %s',
- 'astroid-error',
- 'Used when an unexpected error occurred while building the '
- 'Astroid representation. This is usually accompanied by a '
- 'traceback. Please report such errors !'),
- 'F0010': ('error while code parsing: %s',
- 'parse-error',
- 'Used when an exception occurred while building the Astroid '
- 'representation which could be handled by astroid.'),
-
- 'I0001': ('Unable to run raw checkers on built-in module %s',
- 'raw-checker-failed',
- 'Used to inform that a built-in module has not been checked '
- 'using the raw checkers.'),
-
- 'I0010': ('Unable to consider inline option %r',
- 'bad-inline-option',
- 'Used when an inline option is either badly formatted or can\'t '
- 'be used inside modules.'),
-
- 'I0011': ('Locally disabling %s (%s)',
- 'locally-disabled',
- 'Used when an inline option disables a message or a messages '
- 'category.'),
- 'I0012': ('Locally enabling %s (%s)',
- 'locally-enabled',
- 'Used when an inline option enables a message or a messages '
- 'category.'),
- 'I0013': ('Ignoring entire file',
- 'file-ignored',
- 'Used to inform that the file will not be checked'),
- 'I0020': ('Suppressed %s (from line %d)',
- 'suppressed-message',
- 'A message was triggered on a line, but suppressed explicitly '
- 'by a disable= comment in the file. This message is not '
- 'generated for messages that are ignored due to configuration '
- 'settings.'),
- 'I0021': ('Useless suppression of %s',
- 'useless-suppression',
- 'Reported when a message is explicitly disabled for a line or '
- 'a block of code, but never triggered.'),
- 'I0022': ('Pragma "%s" is deprecated, use "%s" instead',
- 'deprecated-pragma',
- 'Some inline pylint options have been renamed or reworked, '
- 'only the most recent form should be used. '
- 'NOTE:skip-all is only available with pylint >= 0.26',
- {'old_names': [('I0014', 'deprecated-disable-all')]}),
-
- 'E0001': ('%s',
- 'syntax-error',
- 'Used when a syntax error is raised for a module.'),
-
- 'E0011': ('Unrecognized file option %r',
- 'unrecognized-inline-option',
- 'Used when an unknown inline option is encountered.'),
- 'E0012': ('Bad option value %r',
- 'bad-option-value',
- 'Used when a bad value for an inline option is encountered.'),
- }
-
-
-if multiprocessing is not None:
- class ChildLinter(multiprocessing.Process):
- def run(self):
- # pylint: disable=no-member, unbalanced-tuple-unpacking
- tasks_queue, results_queue, self._config = self._args
-
- self._config["jobs"] = 1 # Child does not parallelize any further.
- self._python3_porting_mode = self._config.pop(
- 'python3_porting_mode', None)
- self._plugins = self._config.pop('plugins', None)
-
- # Run linter for received files/modules.
- for file_or_module in iter(tasks_queue.get, 'STOP'):
- result = self._run_linter(file_or_module[0])
- try:
- results_queue.put(result)
- except Exception as ex:
- print("internal error with sending report for module %s" %
- file_or_module, file=sys.stderr)
- print(ex, file=sys.stderr)
- results_queue.put({})
-
- def _run_linter(self, file_or_module):
- linter = PyLinter()
-
- # Register standard checkers.
- linter.load_default_plugins()
- # Load command line plugins.
- if self._plugins:
- linter.load_plugin_modules(self._plugins)
-
- linter.load_configuration_from_config(self._config)
- linter.set_reporter(reporters.CollectingReporter())
-
- # Enable the Python 3 checker mode. This option is
- # passed down from the parent linter up to here, since
- # the Python 3 porting flag belongs to the Run class,
- # instead of the Linter class.
- if self._python3_porting_mode:
- linter.python3_porting_mode()
-
- # Run the checks.
- linter.check(file_or_module)
-
- msgs = [_get_new_args(m) for m in linter.reporter.messages]
- return (file_or_module, linter.file_state.base_name, linter.current_name,
- msgs, linter.stats, linter.msg_status)
-
-
-class PyLinter(config.OptionsManagerMixIn,
- utils.MessagesHandlerMixIn,
- utils.ReportsHandlerMixIn,
- checkers.BaseTokenChecker):
- """lint Python modules using external checkers.
-
- This is the main checker controlling the other ones and the reports
- generation. It is itself both a raw checker and an astroid checker in order
- to:
- * handle message activation / deactivation at the module level
- * handle some basic but necessary stats'data (number of classes, methods...)
-
- IDE plugin developers: you may have to call
- `astroid.builder.MANAGER.astroid_cache.clear()` across runs if you want
- to ensure the latest code version is actually checked.
- """
-
- __implements__ = (interfaces.ITokenChecker, )
-
- name = 'master'
- priority = 0
- level = 0
- msgs = MSGS
-
- @staticmethod
- def make_options():
- return (('ignore',
- {'type' : 'csv', 'metavar' : '[,...]',
- 'dest' : 'black_list', 'default' : ('CVS',),
- 'help' : 'Add files or directories to the blacklist. '
- 'They should be base names, not paths.'}),
-
- ('ignore-patterns',
- {'type' : 'regexp_csv', 'metavar' : '[,...]',
- 'dest' : 'black_list_re', 'default' : (),
- 'help' : 'Add files or directories matching the regex patterns to the'
- ' blacklist. The regex matches against base names, not paths.'}),
-
- ('persistent',
- {'default': True, 'type' : 'yn', 'metavar' : '',
- 'level': 1,
- 'help' : 'Pickle collected data for later comparisons.'}),
-
- ('load-plugins',
- {'type' : 'csv', 'metavar' : '', 'default' : (),
- 'level': 1,
- 'help' : 'List of plugins (as comma separated values of '
- 'python modules names) to load, usually to register '
- 'additional checkers.'}),
-
- ('output-format',
- {'default': 'text', 'type': 'string', 'metavar' : '',
- 'short': 'f',
- 'group': 'Reports',
- 'help' : 'Set the output format. Available formats are text,'
- ' parseable, colorized, json and msvs (visual studio).'
- 'You can also give a reporter class, eg mypackage.mymodule.'
- 'MyReporterClass.'}),
-
- ('reports',
- {'default': False, 'type' : 'yn', 'metavar' : '