From 7fbf9eb0a18c093f841a1e5afe2f57a6df2e6337 Mon Sep 17 00:00:00 2001 From: Felipe Vieira Date: Tue, 8 Oct 2019 11:16:46 -0300 Subject: [PATCH 01/81] Fix import error for 'pymode.libs.six' --- autoload/pymode/breakpoint.vim | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index 49394603..8e939f59 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -9,7 +9,10 @@ fun! pymode#breakpoint#init() "{{{ PymodePython << EOF -from pymode.libs.six import PY3 +try: + from pymode.libs.six import PY3 +except ImportError: + PY3 = False if PY3: from importlib.util import find_spec From 1e24d530abd37a45ece878883aa4dbb62a60ce66 Mon Sep 17 00:00:00 2001 From: Andrei Vacariu Date: Thu, 5 Dec 2019 14:36:31 -0800 Subject: [PATCH 02/81] Assume Python 3 is being used This commit drops support for Python 2 by removing all if/else and try/except blocks related to handling differences between Python 2 and 3. Instead, it's just assumed that Python 3 is being used. --- autoload/pymode/breakpoint.vim | 19 +------ pymode/__init__.py | 16 +++--- pymode/_compat.py | 98 ---------------------------------- pymode/async.py | 2 +- pymode/environment.py | 12 +---- pymode/run.py | 2 +- pymode/utils.py | 2 +- 7 files changed, 13 insertions(+), 138 deletions(-) delete mode 100644 pymode/_compat.py diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index 8e939f59..2692ca34 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -9,25 +9,10 @@ fun! pymode#breakpoint#init() "{{{ PymodePython << EOF -try: - from pymode.libs.six import PY3 -except ImportError: - PY3 = False - -if PY3: - from importlib.util import find_spec - def module_exists(module_name): - return find_spec(module_name) -else: - from imp import find_module - def module_exists(module_name): - try: - return find_module(module_name) - except ImportError: - return False +from importlib.util import find_spec for module in ('wdb', 'pudb', 'ipdb', 'pdb'): - if module_exists(module): + if find_spec(module): vim.command('let g:pymode_breakpoint_cmd = "import %s; %s.set_trace() # XXX BREAKPOINT"' % (module, module)) break EOF diff --git a/pymode/__init__.py b/pymode/__init__.py index ef548a45..aba22870 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -1,15 +1,13 @@ """Pymode support functions.""" -from __future__ import absolute_import - import sys +from importlib.machinery import PathFinder as _PathFinder + import vim # noqa -try: - from importlib.machinery import PathFinder as _PathFinder - if not hasattr(vim, 'find_module'): - vim.find_module = _PathFinder.find_module -except ImportError: - pass + +if not hasattr(vim, 'find_module'): + vim.find_module = _PathFinder.find_module + def auto(): """Fix PEP8 erorrs in current buffer. @@ -39,7 +37,7 @@ class Options(object): def get_documentation(): """Search documentation and append to current buffer.""" - from ._compat import StringIO + from io import StringIO sys.stdout, _ = StringIO(), sys.stdout help(vim.eval('a:word')) diff --git a/pymode/_compat.py b/pymode/_compat.py deleted file mode 100644 index d859f152..00000000 --- a/pymode/_compat.py +++ /dev/null @@ -1,98 +0,0 @@ -""" Compatibility. - - Some py2/py3 compatibility support based on a stripped down - version of six so we don't have to depend on a specific version - of it. - - :copyright: (c) 2014 by Armin Ronacher. 
- :license: BSD -""" -import sys - -PY2 = sys.version_info[0] == 2 -_identity = lambda x: x - - -if not PY2: - text_type = str - string_types = (str,) - integer_types = (int, ) - - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: iter(d.values()) - iteritems = lambda d: iter(d.items()) - - from io import StringIO - from queue import Queue # noqa - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - implements_to_string = _identity - -else: - text_type = unicode - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - from cStringIO import StringIO - from Queue import Queue - - exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') - return cls - - -def with_metaclass(meta, *bases): - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. Because of internal type checks - # we also need to make sure that we downgrade the custom metaclass - # for one level to something closer to type (that's why __call__ and - # __init__ comes back from type etc.). - # - # This has the advantage over six.with_metaclass in that it does not - # introduce dummy classes into the final MRO. - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - return metaclass('temporary_class', None, {}) - - -# Certain versions of pypy have a bug where clearing the exception stack -# breaks the __exit__ function in a very peculiar way. This is currently -# true for pypy 2.2.1 for instance. The second level of exception blocks -# is necessary because pypy seems to forget to check if an exception -# happend until the next bytecode instruction? -BROKEN_PYPY_CTXMGR_EXIT = False -if hasattr(sys, 'pypy_version_info'): - class _Mgr(object): - def __enter__(self): - return self - def __exit__(self, *args): - sys.exc_clear() - try: - try: - with _Mgr(): - raise AssertionError() - except: - raise - except TypeError: - BROKEN_PYPY_CTXMGR_EXIT = True - except AssertionError: - pass - -# pylama:skip=1 diff --git a/pymode/async.py b/pymode/async.py index dd314d76..d211ac4a 100644 --- a/pymode/async.py +++ b/pymode/async.py @@ -1,6 +1,6 @@ """ Python-mode async support. 
""" -from ._compat import Queue +from queue import Queue # noqa RESULTS = Queue() diff --git a/pymode/environment.py b/pymode/environment.py index 56f49b4a..5ac4d512 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -1,14 +1,10 @@ """Define interfaces.""" -from __future__ import print_function - import json import os.path import time import vim # noqa -from ._compat import PY2 - class VimPymodeEnviroment(object): @@ -53,10 +49,7 @@ def lines(self): :return list: """ - if not PY2: - return self.curbuf - - return [l.decode(self.options.get('encoding')) for l in self.curbuf] + return self.curbuf @staticmethod def var(name, to_bool=False, silence=False, default=None): @@ -201,9 +194,6 @@ def prepare_value(self, value, dumps=True): if dumps: value = json.dumps(value) - if PY2: - value = value.decode('utf-8').encode(self.options.get('encoding')) - return value def get_offset_params(self, cursor=None, base=""): diff --git a/pymode/run.py b/pymode/run.py index 5c113640..bb83fa2c 100644 --- a/pymode/run.py +++ b/pymode/run.py @@ -1,8 +1,8 @@ """ Code runnning support. """ import sys +from io import StringIO from re import compile as re -from ._compat import StringIO from .environment import env diff --git a/pymode/utils.py b/pymode/utils.py index bf77eeb9..b934828e 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -4,9 +4,9 @@ import threading import warnings from contextlib import contextmanager +from io import StringIO import vim # noqa -from ._compat import StringIO DEBUG = int(vim.eval('g:pymode_debug')) From f5d4aeeb9d12605cefb5e1d8ab4a0ce5e1ab6c9e Mon Sep 17 00:00:00 2001 From: Andrei Vacariu Date: Thu, 5 Dec 2019 14:47:49 -0800 Subject: [PATCH 03/81] Remove six completely --- .gitmodules | 3 --- pymode/libs/six.py | 1 - submodules/six | 1 - 3 files changed, 5 deletions(-) delete mode 120000 pymode/libs/six.py delete mode 160000 submodules/six diff --git a/.gitmodules b/.gitmodules index b93bbbf2..ada9193a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -32,9 +32,6 @@ [submodule "submodules/astroid"] path = submodules/astroid url = https://github.com/PyCQA/astroid -[submodule "submodules/six"] - path = submodules/six - url = https://github.com/benjaminp/six.git [submodule "submodules/pylama"] path = submodules/pylama url = https://github.com/klen/pylama diff --git a/pymode/libs/six.py b/pymode/libs/six.py deleted file mode 120000 index f185e0e0..00000000 --- a/pymode/libs/six.py +++ /dev/null @@ -1 +0,0 @@ -../../submodules/six/six.py \ No newline at end of file diff --git a/submodules/six b/submodules/six deleted file mode 160000 index 84d07dd1..00000000 --- a/submodules/six +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 84d07dd19523a3a41385f23a744a126d00a72c79 From 311de6710d0e119a35ddd582f232e452cd840dda Mon Sep 17 00:00:00 2001 From: Andrei Vacariu Date: Fri, 6 Dec 2019 15:38:03 -0800 Subject: [PATCH 04/81] Remove checks for Python 2 from Vim script --- doc/pymode.txt | 8 ++++---- ftplugin/python/pymode.vim | 1 - plugin/pymode.vim | 17 +++-------------- readme.md | 7 +------ 4 files changed, 8 insertions(+), 25 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 55771510..2e2b7f98 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -148,14 +148,14 @@ will appear, eg. `:botright`. 2.1. Python version ~ *pymode-python-version* -By default pymode looks for current python version supported in your Vim. -You could choose prefer version, but value will be tested on loading. +By default pymode will attempt to use Python 3, if available. 
However, you can +also disable all Python features of pymode. *'g:pymode_python'* > - let g:pymode_python = 'python' + let g:pymode_python = 'python3' -Values are `python`, `python3`, `disable`. If value set to `disable` most +Values are `python3`, `disable`. If value set to `disable` most python-features of **pymode** will be disabled. Set value to `python3` if you are working with python3 projects. You could use diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index c3ade0cd..c13aff71 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -232,7 +232,6 @@ if g:pymode_debug let g:pymode_debug_tempfile=matchstr( \ execute( \ g:pymode_python - \ . " from __future__ import print_function;" \ . " import os;import tempfile; marker='|';" \ . " print(marker, tempfile.gettempdir(), os.sep, " \ . "'pymode_debug_file.txt', marker, sep='', end='')"), diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 11df75be..67216a07 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -22,7 +22,7 @@ filetype plugin on if has("python3") && executable('python3') call pymode#default('g:pymode_python', 'python3') else - call pymode#default('g:pymode_python', 'python') + call pymode#default('g:pymode_python', 'disable') endif " Disable pymode warnings @@ -286,25 +286,14 @@ filetype plugin on " UltiSnips Fixes if !len(g:pymode_python) - if exists('g:_uspy') && g:_uspy == ':py' - let g:pymode_python = 'python' - elseif exists('g:_uspy') && g:_uspy == ':py3' - let g:pymode_python = 'python3' - elseif has("python") - let g:pymode_python = 'python' - elseif has("python3") + if (exists('g:_uspy') && g:_uspy == ':py3') || has("python3") let g:pymode_python = 'python3' else let g:pymode_python = 'disable' endif endif -if g:pymode_python == 'python' - - command! -nargs=1 PymodePython python - let g:UltiSnipsUsePythonVersion = 2 - -elseif g:pymode_python == 'python3' +if g:pymode_python == 'python3' command! -nargs=1 PymodePython python3 let g:UltiSnipsUsePythonVersion = 3 diff --git a/readme.md b/readme.md index 3235adec..7d6749ec 100644 --- a/readme.md +++ b/readme.md @@ -183,12 +183,7 @@ Read this section before opening an issue on the tracker. ## Python 3 syntax -By default python-mode uses python 2 syntax checking. To enable python 3 syntax -checking (e.g. for async) add: - - let g:pymode_python = 'python3' - -To your vimrc or exrc file. +By default python-mode uses python 3 syntax checking. 
## Symlinks on Windows From 012ef6032c640bb2416557daa7793ea0722d3328 Mon Sep 17 00:00:00 2001 From: Andrei Vacariu Date: Fri, 6 Dec 2019 15:41:46 -0800 Subject: [PATCH 05/81] Remove __future__ import in rope.py --- pymode/rope.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index ce06384d..ba5f55b2 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,7 +1,5 @@ """Integration with Rope library.""" -from __future__ import absolute_import, print_function - import os.path import re import site From dfbb0d9e1776549c3350f6ad53f46cff2fe9c4a1 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 13:00:55 -0300 Subject: [PATCH 06/81] Remove logilab hardcoded dependency --- pymode/libs/logilab | 1 - pymode/libs/logilab-common-1.4.1/COPYING | 339 ---- .../libs/logilab-common-1.4.1/COPYING.LESSER | 510 ------ pymode/libs/logilab-common-1.4.1/ChangeLog | 1613 ----------------- pymode/libs/logilab-common-1.4.1/MANIFEST.in | 14 - pymode/libs/logilab-common-1.4.1/PKG-INFO | 164 -- pymode/libs/logilab-common-1.4.1/README | 150 -- .../libs/logilab-common-1.4.1/__pkginfo__.py | 61 - .../logilab-common-1.4.1/bin/logilab-pytest | 7 - .../bin/logilab-pytest.bat | 17 - .../logilab-common-1.4.1/doc/logilab-pytest.1 | 54 - pymode/libs/logilab-common-1.4.1/doc/makefile | 8 - .../logilab-common-1.4.1/logilab/__init__.py | 1 - .../logilab/common/__init__.py | 184 -- .../logilab/common/cache.py | 114 -- .../logilab/common/changelog.py | 249 --- .../logilab/common/clcommands.py | 334 ---- .../logilab/common/compat.py | 78 - .../logilab/common/configuration.py | 1108 ----------- .../logilab/common/daemon.py | 101 -- .../logilab/common/date.py | 335 ---- .../logilab/common/debugger.py | 214 --- .../logilab/common/decorators.py | 281 --- .../logilab/common/deprecation.py | 189 -- .../logilab/common/fileutils.py | 397 ---- .../logilab/common/graph.py | 282 --- .../logilab/common/interface.py | 71 - .../logilab/common/logging_ext.py | 195 -- .../logilab/common/modutils.py | 753 -------- .../logilab/common/optik_ext.py | 394 ---- .../logilab/common/optparser.py | 92 - .../logilab/common/proc.py | 277 --- .../logilab/common/pytest.py | 1304 ------------- .../logilab/common/registry.py | 1156 ------------ .../logilab/common/shellutils.py | 406 ----- .../logilab/common/sphinx_ext.py | 87 - .../logilab/common/sphinxutils.py | 122 -- .../logilab/common/table.py | 929 ---------- .../logilab/common/tasksqueue.py | 101 -- .../logilab/common/testlib.py | 708 -------- .../logilab/common/textutils.py | 539 ------ .../logilab/common/tree.py | 369 ---- .../logilab/common/umessage.py | 177 -- .../logilab/common/ureports/__init__.py | 172 -- .../logilab/common/ureports/docbook_writer.py | 140 -- .../logilab/common/ureports/html_writer.py | 133 -- .../logilab/common/ureports/nodes.py | 203 --- .../logilab/common/ureports/text_writer.py | 145 -- .../logilab/common/urllib2ext.py | 89 - .../logilab/common/vcgutils.py | 216 --- .../logilab/common/visitor.py | 109 -- .../logilab/common/xmlutils.py | 61 - pymode/libs/logilab-common-1.4.1/setup.cfg | 9 - pymode/libs/logilab-common-1.4.1/setup.py | 54 - .../logilab-common-1.4.1/test/data/ChangeLog | 184 -- .../test/data/MyPyPa-0.1.0.zip | Bin 206 -> 0 bytes .../test/data/__init__.py | 0 .../test/data/__pkginfo__.py | 57 - .../test/data/content_differ_dir/NOTHING | 0 .../test/data/content_differ_dir/README | 1 - .../test/data/content_differ_dir/subdir/coin | 1 - .../data/content_differ_dir/subdir/toto.txt | 53 - 
.../test/data/deprecation.py | 4 - .../test/data/file_differ_dir/NOTHING | 0 .../test/data/file_differ_dir/README | 1 - .../test/data/file_differ_dir/subdir/toto.txt | 53 - .../test/data/file_differ_dir/subdirtwo/Hello | 0 .../test/data/find_test/__init__.py | 0 .../test/data/find_test/foo.txt | 0 .../test/data/find_test/module.py | 0 .../test/data/find_test/module2.py | 0 .../test/data/find_test/newlines.txt | 0 .../test/data/find_test/noendingnewline.py | 0 .../test/data/find_test/nonregr.py | 0 .../test/data/find_test/normal_file.txt | 0 .../test/data/find_test/spam.txt | 0 .../test/data/find_test/sub/doc.txt | 0 .../test/data/find_test/sub/momo.py | 0 .../test/data/find_test/test.ini | 0 .../test/data/find_test/test1.msg | 0 .../test/data/find_test/test2.msg | 0 .../data/find_test/write_protected_file.txt | 0 .../logilab-common-1.4.1/test/data/foo.txt | 9 - .../test/data/lmfp/__init__.py | 2 - .../test/data/lmfp/foo.py | 6 - .../logilab-common-1.4.1/test/data/module.py | 69 - .../logilab-common-1.4.1/test/data/module2.py | 77 - .../test/data/newlines.txt | 3 - .../test/data/noendingnewline.py | 36 - .../logilab-common-1.4.1/test/data/nonregr.py | 16 - .../test/data/normal_file.txt | 0 .../test/data/reference_dir/NOTHING | 0 .../test/data/reference_dir/README | 1 - .../test/data/reference_dir/subdir/coin | 1 - .../test/data/reference_dir/subdir/toto.txt | 53 - .../test/data/regobjects.py | 22 - .../test/data/regobjects2.py | 8 - .../test/data/same_dir/NOTHING | 0 .../test/data/same_dir/README | 1 - .../test/data/same_dir/subdir/coin | 1 - .../test/data/same_dir/subdir/toto.txt | 53 - .../logilab-common-1.4.1/test/data/spam.txt | 9 - .../test/data/sub/doc.txt | 1 - .../test/data/sub/momo.py | 3 - .../test/data/subdir_differ_dir/NOTHING | 0 .../test/data/subdir_differ_dir/README | 1 - .../test/data/subdir_differ_dir/subdir/coin | 1 - .../data/subdir_differ_dir/subdir/toto.txt | 53 - .../logilab-common-1.4.1/test/data/test.ini | 20 - .../logilab-common-1.4.1/test/data/test1.msg | 30 - .../logilab-common-1.4.1/test/data/test2.msg | 42 - .../test/data/write_protected_file.txt | 0 .../test/unittest_cache.py | 129 -- .../test/unittest_changelog.py | 40 - .../test/unittest_configuration.py | 509 ------ .../test/unittest_date.py | 206 --- .../test/unittest_decorators.py | 208 --- .../test/unittest_deprecation.py | 147 -- .../test/unittest_fileutils.py | 146 -- .../test/unittest_graph.py | 89 - .../test/unittest_interface.py | 87 - .../test/unittest_modutils.py | 296 --- .../test/unittest_pytest.py | 86 - .../test/unittest_registry.py | 220 --- .../test/unittest_shellutils.py | 235 --- .../test/unittest_table.py | 448 ----- .../test/unittest_taskqueue.py | 71 - .../test/unittest_testlib.py | 790 -------- .../test/unittest_textutils.py | 268 --- .../test/unittest_tree.py | 247 --- .../test/unittest_umessage.py | 94 - .../test/unittest_ureports_html.py | 63 - .../test/unittest_ureports_text.py | 104 -- .../test/unittest_xmlutils.py | 75 - .../libs/logilab-common-1.4.1/test/utils.py | 96 - 135 files changed, 21342 deletions(-) delete mode 120000 pymode/libs/logilab delete mode 100644 pymode/libs/logilab-common-1.4.1/COPYING delete mode 100644 pymode/libs/logilab-common-1.4.1/COPYING.LESSER delete mode 100644 pymode/libs/logilab-common-1.4.1/ChangeLog delete mode 100644 pymode/libs/logilab-common-1.4.1/MANIFEST.in delete mode 100644 pymode/libs/logilab-common-1.4.1/PKG-INFO delete mode 100644 pymode/libs/logilab-common-1.4.1/README delete mode 100644 pymode/libs/logilab-common-1.4.1/__pkginfo__.py 
delete mode 100755 pymode/libs/logilab-common-1.4.1/bin/logilab-pytest delete mode 100644 pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat delete mode 100644 pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1 delete mode 100644 pymode/libs/logilab-common-1.4.1/doc/makefile delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/cache.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/compat.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/date.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/graph.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/interface.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/proc.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/registry.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/table.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/tree.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py delete mode 100644 pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py delete mode 100644 
pymode/libs/logilab-common-1.4.1/setup.cfg delete mode 100644 pymode/libs/logilab-common-1.4.1/setup.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/ChangeLog delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/__pkginfo__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/NOTHING delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/deprecation.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/NOTHING delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/foo.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/foo.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/module.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/module2.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/newlines.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/nonregr.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt delete mode 100644 
pymode/libs/logilab-common-1.4.1/test/data/regobjects.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/README delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/spam.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test.ini delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test1.msg delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/test2.msg delete mode 100644 pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_cache.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_date.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_graph.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_interface.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_registry.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_table.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_tree.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py delete mode 100644 pymode/libs/logilab-common-1.4.1/test/utils.py diff --git a/pymode/libs/logilab b/pymode/libs/logilab deleted file mode 120000 index 1100ab45..00000000 --- a/pymode/libs/logilab +++ /dev/null @@ -1 +0,0 @@ -logilab-common-1.4.1/logilab \ No newline at end of file diff --git a/pymode/libs/logilab-common-1.4.1/COPYING b/pymode/libs/logilab-common-1.4.1/COPYING deleted file mode 100644 index d511905c..00000000 --- a/pymode/libs/logilab-common-1.4.1/COPYING +++ 
/dev/null @@ -1,339 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. 
(Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. 
You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. 
If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along - with this program; if not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. 
Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - , 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. diff --git a/pymode/libs/logilab-common-1.4.1/COPYING.LESSER b/pymode/libs/logilab-common-1.4.1/COPYING.LESSER deleted file mode 100644 index 2d2d780e..00000000 --- a/pymode/libs/logilab-common-1.4.1/COPYING.LESSER +++ /dev/null @@ -1,510 +0,0 @@ - - GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1, February 1999 - - Copyright (C) 1991, 1999 Free Software Foundation, Inc. - 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - -[This is the first released version of the Lesser GPL. It also counts - as the successor of the GNU Library Public License, version 2, hence - the version number 2.1.] - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -Licenses are intended to guarantee your freedom to share and change -free software--to make sure the software is free for all its users. - - This license, the Lesser General Public License, applies to some -specially designated software packages--typically libraries--of the -Free Software Foundation and other authors who decide to use it. You -can use it too, but we suggest you first think carefully about whether -this license or the ordinary General Public License is the better -strategy to use in any particular case, based on the explanations -below. - - When we speak of free software, we are referring to freedom of use, -not price. Our General Public Licenses are designed to make sure that -you have the freedom to distribute copies of free software (and charge -for this service if you wish); that you receive source code or can get -it if you want it; that you can change the software and use pieces of -it in new free programs; and that you are informed that you can do -these things. - - To protect your rights, we need to make restrictions that forbid -distributors to deny you these rights or to ask you to surrender these -rights. These restrictions translate to certain responsibilities for -you if you distribute copies of the library or if you modify it. - - For example, if you distribute copies of the library, whether gratis -or for a fee, you must give the recipients all the rights that we gave -you. You must make sure that they, too, receive or can get the source -code. If you link other code with the library, you must provide -complete object files to the recipients, so that they can relink them -with the library after making changes to the library and recompiling -it. And you must show them these terms so they know their rights. - - We protect your rights with a two-step method: (1) we copyright the -library, and (2) we offer you this license, which gives you legal -permission to copy, distribute and/or modify the library. - - To protect each distributor, we want to make it very clear that -there is no warranty for the free library. 
Also, if the library is -modified by someone else and passed on, the recipients should know -that what they have is not the original version, so that the original -author's reputation will not be affected by problems that might be -introduced by others. - - Finally, software patents pose a constant threat to the existence of -any free program. We wish to make sure that a company cannot -effectively restrict the users of a free program by obtaining a -restrictive license from a patent holder. Therefore, we insist that -any patent license obtained for a version of the library must be -consistent with the full freedom of use specified in this license. - - Most GNU software, including some libraries, is covered by the -ordinary GNU General Public License. This license, the GNU Lesser -General Public License, applies to certain designated libraries, and -is quite different from the ordinary General Public License. We use -this license for certain libraries in order to permit linking those -libraries into non-free programs. - - When a program is linked with a library, whether statically or using -a shared library, the combination of the two is legally speaking a -combined work, a derivative of the original library. The ordinary -General Public License therefore permits such linking only if the -entire combination fits its criteria of freedom. The Lesser General -Public License permits more lax criteria for linking other code with -the library. - - We call this license the "Lesser" General Public License because it -does Less to protect the user's freedom than the ordinary General -Public License. It also provides other free software developers Less -of an advantage over competing non-free programs. These disadvantages -are the reason we use the ordinary General Public License for many -libraries. However, the Lesser license provides advantages in certain -special circumstances. - - For example, on rare occasions, there may be a special need to -encourage the widest possible use of a certain library, so that it -becomes a de-facto standard. To achieve this, non-free programs must -be allowed to use the library. A more frequent case is that a free -library does the same job as widely used non-free libraries. In this -case, there is little to gain by limiting the free library to free -software only, so we use the Lesser General Public License. - - In other cases, permission to use a particular library in non-free -programs enables a greater number of people to use a large body of -free software. For example, permission to use the GNU C Library in -non-free programs enables many more people to use the whole GNU -operating system, as well as its variant, the GNU/Linux operating -system. - - Although the Lesser General Public License is Less protective of the -users' freedom, it does ensure that the user of a program that is -linked with the Library has the freedom and the wherewithal to run -that program using a modified version of the Library. - - The precise terms and conditions for copying, distribution and -modification follow. Pay close attention to the difference between a -"work based on the library" and a "work that uses the library". The -former contains code derived from the library, whereas the latter must -be combined with the library in order to run. - - GNU LESSER GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. 
This License Agreement applies to any software library or other -program which contains a notice placed by the copyright holder or -other authorized party saying it may be distributed under the terms of -this Lesser General Public License (also called "this License"). -Each licensee is addressed as "you". - - A "library" means a collection of software functions and/or data -prepared so as to be conveniently linked with application programs -(which use some of those functions and data) to form executables. - - The "Library", below, refers to any such software library or work -which has been distributed under these terms. A "work based on the -Library" means either the Library or any derivative work under -copyright law: that is to say, a work containing the Library or a -portion of it, either verbatim or with modifications and/or translated -straightforwardly into another language. (Hereinafter, translation is -included without limitation in the term "modification".) - - "Source code" for a work means the preferred form of the work for -making modifications to it. For a library, complete source code means -all the source code for all modules it contains, plus any associated -interface definition files, plus the scripts used to control -compilation and installation of the library. - - Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running a program using the Library is not restricted, and output from -such a program is covered only if its contents constitute a work based -on the Library (independent of the use of the Library in a tool for -writing it). Whether that is true depends on what the Library does -and what the program that uses the Library does. - - 1. You may copy and distribute verbatim copies of the Library's -complete source code as you receive it, in any medium, provided that -you conspicuously and appropriately publish on each copy an -appropriate copyright notice and disclaimer of warranty; keep intact -all the notices that refer to this License and to the absence of any -warranty; and distribute a copy of this License along with the -Library. - - You may charge a fee for the physical act of transferring a copy, -and you may at your option offer warranty protection in exchange for a -fee. - - 2. You may modify your copy or copies of the Library or any portion -of it, thus forming a work based on the Library, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) The modified work must itself be a software library. - - b) You must cause the files modified to carry prominent notices - stating that you changed the files and the date of any change. - - c) You must cause the whole of the work to be licensed at no - charge to all third parties under the terms of this License. - - d) If a facility in the modified Library refers to a function or a - table of data to be supplied by an application program that uses - the facility, other than as an argument passed when the facility - is invoked, then you must make a good faith effort to ensure that, - in the event an application does not supply such function or - table, the facility still operates, and performs whatever part of - its purpose remains meaningful. - - (For example, a function in a library to compute square roots has - a purpose that is entirely well-defined independent of the - application. 
Therefore, Subsection 2d requires that any - application-supplied function or table used by this function must - be optional: if the application does not supply it, the square - root function must still compute square roots.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Library, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Library, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote -it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Library. - -In addition, mere aggregation of another work not based on the Library -with the Library (or with a work based on the Library) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may opt to apply the terms of the ordinary GNU General Public -License instead of this License to a given copy of the Library. To do -this, you must alter all the notices that refer to this License, so -that they refer to the ordinary GNU General Public License, version 2, -instead of to this License. (If a newer version than version 2 of the -ordinary GNU General Public License has appeared, then you can specify -that version instead if you wish.) Do not make any other change in -these notices. - - Once this change is made in a given copy, it is irreversible for -that copy, so the ordinary GNU General Public License applies to all -subsequent copies and derivative works made from that copy. - - This option is useful when you wish to copy part of the code of -the Library into a program that is not a library. - - 4. You may copy and distribute the Library (or a portion or -derivative of it, under Section 2) in object code or executable form -under the terms of Sections 1 and 2 above provided that you accompany -it with the complete corresponding machine-readable source code, which -must be distributed under the terms of Sections 1 and 2 above on a -medium customarily used for software interchange. - - If distribution of object code is made by offering access to copy -from a designated place, then offering equivalent access to copy the -source code from the same place satisfies the requirement to -distribute the source code, even though third parties are not -compelled to copy the source along with the object code. - - 5. A program that contains no derivative of any portion of the -Library, but is designed to work with the Library by being compiled or -linked with it, is called a "work that uses the Library". Such a -work, in isolation, is not a derivative work of the Library, and -therefore falls outside the scope of this License. - - However, linking a "work that uses the Library" with the Library -creates an executable that is a derivative of the Library (because it -contains portions of the Library), rather than a "work that uses the -library". The executable is therefore covered by this License. -Section 6 states terms for distribution of such executables. 
- - When a "work that uses the Library" uses material from a header file -that is part of the Library, the object code for the work may be a -derivative work of the Library even though the source code is not. -Whether this is true is especially significant if the work can be -linked without the Library, or if the work is itself a library. The -threshold for this to be true is not precisely defined by law. - - If such an object file uses only numerical parameters, data -structure layouts and accessors, and small macros and small inline -functions (ten lines or less in length), then the use of the object -file is unrestricted, regardless of whether it is legally a derivative -work. (Executables containing this object code plus portions of the -Library will still fall under Section 6.) - - Otherwise, if the work is a derivative of the Library, you may -distribute the object code for the work under the terms of Section 6. -Any executables containing that work also fall under Section 6, -whether or not they are linked directly with the Library itself. - - 6. As an exception to the Sections above, you may also combine or -link a "work that uses the Library" with the Library to produce a -work containing portions of the Library, and distribute that work -under terms of your choice, provided that the terms permit -modification of the work for the customer's own use and reverse -engineering for debugging such modifications. - - You must give prominent notice with each copy of the work that the -Library is used in it and that the Library and its use are covered by -this License. You must supply a copy of this License. If the work -during execution displays copyright notices, you must include the -copyright notice for the Library among them, as well as a reference -directing the user to the copy of this License. Also, you must do one -of these things: - - a) Accompany the work with the complete corresponding - machine-readable source code for the Library including whatever - changes were used in the work (which must be distributed under - Sections 1 and 2 above); and, if the work is an executable linked - with the Library, with the complete machine-readable "work that - uses the Library", as object code and/or source code, so that the - user can modify the Library and then relink to produce a modified - executable containing the modified Library. (It is understood - that the user who changes the contents of definitions files in the - Library will not necessarily be able to recompile the application - to use the modified definitions.) - - b) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (1) uses at run time a - copy of the library already present on the user's computer system, - rather than copying library functions into the executable, and (2) - will operate properly with a modified version of the library, if - the user installs one, as long as the modified version is - interface-compatible with the version that the work was made with. - - c) Accompany the work with a written offer, valid for at least - three years, to give the same user the materials specified in - Subsection 6a, above, for a charge no more than the cost of - performing this distribution. - - d) If distribution of the work is made by offering access to copy - from a designated place, offer equivalent access to copy the above - specified materials from the same place. 
- - e) Verify that the user has already received a copy of these - materials or that you have already sent this user a copy. - - For an executable, the required form of the "work that uses the -Library" must include any data and utility programs needed for -reproducing the executable from it. However, as a special exception, -the materials to be distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies -the executable. - - It may happen that this requirement contradicts the license -restrictions of other proprietary libraries that do not normally -accompany the operating system. Such a contradiction means you cannot -use both them and the Library together in an executable that you -distribute. - - 7. You may place library facilities that are a work based on the -Library side-by-side in a single library together with other library -facilities not covered by this License, and distribute such a combined -library, provided that the separate distribution of the work based on -the Library and of the other library facilities is otherwise -permitted, and provided that you do these two things: - - a) Accompany the combined library with a copy of the same work - based on the Library, uncombined with any other library - facilities. This must be distributed under the terms of the - Sections above. - - b) Give prominent notice with the combined library of the fact - that part of it is a work based on the Library, and explaining - where to find the accompanying uncombined form of the same work. - - 8. You may not copy, modify, sublicense, link with, or distribute -the Library except as expressly provided under this License. Any -attempt otherwise to copy, modify, sublicense, link with, or -distribute the Library is void, and will automatically terminate your -rights under this License. However, parties who have received copies, -or rights, from you under this License will not have their licenses -terminated so long as such parties remain in full compliance. - - 9. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Library or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Library (or any work based on the -Library), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Library or works based on it. - - 10. Each time you redistribute the Library (or any work based on the -Library), the recipient automatically receives a license from the -original licensor to copy, distribute, link with or modify the Library -subject to these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties with -this License. - - 11. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Library at all. For example, if a patent -license would not permit royalty-free redistribution of the Library by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Library. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply, and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 12. If the distribution and/or use of the Library is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Library under this License -may add an explicit geographical distribution limitation excluding those -countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 13. The Free Software Foundation may publish revised and/or new -versions of the Lesser General Public License from time to time. -Such new versions will be similar in spirit to the present version, -but may differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Library -specifies a version number of this License which applies to it and -"any later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Library does not specify a -license version number, you may choose any version ever published by -the Free Software Foundation. - - 14. If you wish to incorporate parts of the Library into other free -programs whose distribution conditions are incompatible with these, -write to the author to ask for permission. For software which is -copyrighted by the Free Software Foundation, write to the Free -Software Foundation; we sometimes make exceptions for this. Our -decision will be guided by the two goals of preserving the free status -of all derivatives of our free software and of promoting the sharing -and reuse of software generally. - - NO WARRANTY - - 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
-
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
-OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
-KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
-LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
-THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
-WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
-AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
-FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
-CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
-LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
-RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
-FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
-SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
-                     END OF TERMS AND CONDITIONS
-
-           How to Apply These Terms to Your New Libraries
-
-  If you develop a new library, and you want it to be of the greatest
-possible use to the public, we recommend making it free software that
-everyone can redistribute and change. You can do so by permitting
-redistribution under these terms (or, alternatively, under the terms
-of the ordinary General Public License).
-
-  To apply these terms, attach the following notices to the library.
-It is safest to attach them to the start of each source file to most
-effectively convey the exclusion of warranty; and each file should
-have at least the "copyright" line and a pointer to where the full
-notice is found.
-
-    <one line to give the library's name and a brief idea of what it does.>
-    Copyright (C) <year>  <name of author>
-
-    This library is free software; you can redistribute it and/or
-    modify it under the terms of the GNU Lesser General Public
-    License as published by the Free Software Foundation; either
-    version 2.1 of the License, or (at your option) any later version.
-
-    This library is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-    Lesser General Public License for more details.
-
-    You should have received a copy of the GNU Lesser General Public
-    License along with this library; if not, write to the Free Software
-    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-Also add information on how to contact you by electronic and paper mail.
-
-You should also get your employer (if you work as a programmer) or
-your school, if any, to sign a "copyright disclaimer" for the library,
-if necessary. Here is a sample; alter the names:
-
-  Yoyodyne, Inc., hereby disclaims all copyright interest in the
-  library `Frob' (a library for tweaking knobs) written by James
-  Random Hacker.
-
-  <signature of Ty Coon>, 1 April 1990
-  Ty Coon, President of Vice
-
-That's all there is to it!
- - diff --git a/pymode/libs/logilab-common-1.4.1/ChangeLog b/pymode/libs/logilab-common-1.4.1/ChangeLog deleted file mode 100644 index 95c96f6a..00000000 --- a/pymode/libs/logilab-common-1.4.1/ChangeLog +++ /dev/null @@ -1,1613 +0,0 @@ -ChangeLog for logilab.common -============================ - -2016-10-03 -- 1.3.0 - - * pytest: executable deprecated and renamed as logilab-pytest to prevent - conflict with pytest provided by http://pytest.org/ - -2016-03-15 -- 1.2.0 - - * pytest: TraceController class, pause_tracing and resume_tracing - functions, deprecated from 0.63.1, got removed. The nocoverage - and pause_trace utilities are now available from the testlib - module rather than pytest. - - * date: datetime2ticks uses the milliseconds from the datetime objects - -2015-10-12 -- 1.1.0 - * configuration: have a stable order for sections (#298658) - - * testlib: clean out deprecated TestCase methods (#1716063), move pytest - specifics to pytest.py (#1716053) - - * fix a few python3 bugs in umessage, configuration and optik_ext modules - - * testlib: report failures and skips in generative tests properly - - * optik_ext: return bytes as ints and not floats (#2086835) - -2015-07-08 -- 1.0.2 - * declare setuptools requirement in __pkginfo__/setup.py - - * randomize order of test modules in pytest -t - -2015-07-01 -- 1.0.1 - * restore __pkginfo__.version, which pylint < 1.4.4 uses - -2015-06-30 -- 1.0.0 - * remove unused/deprecated modules: cli, contexts, corbautils, dbf, - pyro_ext, xmlrpcutils. __pkginfo__ is no longer installed. - - * major layout change - - * use setuptools exclusively - - * 'logilab' is now a proper namespace package - - * modutils: basic support for namespace packages - - * registry: ambiguous selects now raise a specific exception - - * testlib: better support for non-pytest launchers - - * testlib: Tags() now work with py3k - -2014-11-30 -- 0.63.2 - * fix 2 minor regressions from 0.63.1 - -2014-11-28 -- 0.63.1 - * fix fallout from py3k conversion - - * pytest: fix TestSuite.run wrapper (#280806) - - * daemon: change umask after creating pid file - -2014-11-05 -- 0.63.0 - * drop compatibility with python <= 2.5 (#264017) - - * fix textutils.py doctests for py3k - - * produce a clearer exception when dot is not installed (#253516) - - * make source python3-compatible (3.3+), without using 2to3. This - introduces a dependency on six (#265740) - - * fix umessage header decoding on python 3.3 and newer (#149345) - - * WARNING: the compat module no longer exports 'callable', 'izip', 'imap', - 'chain', 'sum', 'enumerate', 'frozenset', 'reversed', 'sorted', 'max', - 'relpath', 'InheritableSet', or any subprocess-related names. 
- -2014-07-30 -- 0.62.1 - * shellutils: restore py 2.5 compat by removing usage of class decorator - - * pytest: drop broken --coverage option - - * testlib: support for skipping whole test class and conditional skip, don't - run setUp for skipped tests - - * configuration: load options in config file order (#185648) - - - -2014-03-07 -- 0.62.0 - * modutils: cleanup_sys_modules returns the list of cleaned modules - - - -2014-02-11 -- 0.61.0 - * pdf_ext: removed, it had no known users (CVE-2014-1838) - - * shellutils: fix tempfile issue in Execute, and deprecate it - (CVE-2014-1839) - - * pytest: use 'env' to run the python interpreter - - * graph: ensure output is ordered on node and graph ids (#202314) - - - -2013-16-12 -- 0.60.1 - * modutils: - - * don't propagate IOError when package's __init__.py file doesn't - exist (#174606) - - * ensure file is closed, may cause pb depending on the interpreter, eg - pypy) (#180876) - - * fix support for `extend_path` based nested namespace packages ; - Report and patch by John Johnson (#177651) - - * fix some cases of failing python3 install on windows platform / cross - compilation (#180836) - - - -2013-07-26 -- 0.60.0 - * configuration: rename option_name method into option_attrname (#140667) - - * deprecation: new DeprecationManager class (closes #108205) - - * modutils: - - - fix typo causing name error in python3 / bad message in python2 - (#136037) - - fix python3.3 crash in file_from_modpath due to implementation - change of imp.find_module wrt builtin modules (#137244) - - * testlib: use assertCountEqual instead of assertSameElements/assertItemsEqual - (deprecated), fixing crash with python 3.3 (#144526) - - * graph: use codecs.open avoid crash when writing utf-8 data under python3 - (#155138) - - - -2013-04-16 -- 0.59.1 - * graph: added pruning of the recursive search tree for detecting cycles in - graphs (closes #2469) - - * testlib: check for generators in with_tempdir (closes #117533) - - * registry: - - - select_or_none should not silent ObjectNotFound exception - (closes #119819) - - remove 2 accidentally introduced tabs breaking python 3 compat - (closes #117580) - - * fix umessages test w/ python 3 and LC_ALL=C (closes #119967, report and - patch by Ian Delaney) - - - -2013-01-21 -- 0.59.0 - * registry: - - - introduce RegistrableObject base class, mandatory to make - classes automatically registrable, and cleanup code - accordingly - - introduce objid and objname methods on Registry instead of - classid function and inlined code plus other refactorings to allow - arbitrary objects to be registered, provided they inherit from new - RegistrableInstance class (closes #98742) - - deprecate usage of leading underscore to skip object registration, using - __abstract__ explicitly is better and notion of registered object 'name' - is now somewhat fuzzy - - use register_all when no registration callback defined (closes #111011) - - * logging_ext: on windows, use colorama to display colored logs, if available (closes #107436) - - * packaging: remove references to ftp at logilab - - * deprecations: really check them - - * packaging: steal spec file from fedora (closes #113099) - - * packaging force python2.6 on rhel5 (closes #113099) - - * packaging Update download and project urls (closes #113099) - - * configuration: enhance merge_options function (closes #113458) - - * decorators: fix @monkeypatch decorator contract for dark corner - cases such as monkeypatching of a callable instance: no more - turned into an unbound method, which was 
broken in python 3 and - probably not used anywhere (actually closes #104047). - - - -2012-11-14 -- 0.58.3 - * date: fix ustrftime() impl. for python3 (closes #82161, patch by Arfrever - Frehtes Taifersar Arahesis) and encoding detection for python2 (closes - #109740) - - * other python3 code and test fixes (closes #104047) - - * registry: Store.setdefault shouldn't raise RegistryNotFound (closes #111010) - - * table: stop encoding to iso-8859-1, use unicode (closes #105847) - - * setup: properly install additional files during build instead of install (closes #104045) - - - -2012-07-30 -- 0.58.2 - * modutils: fixes (closes #100757 and #100935) - - - -2012-07-17 -- 0.58.1 - * modutils, testlib: be more python implementation independant (closes #99493 and #99627) - - - -2012-04-12 -- 0.58.0 - * new `registry` module containing a backport of CubicWeb selectable objects registry (closes #84654) - - * testlib: DocTestCase fix builtins pollution after doctest execution. - - * shellutil: add argument to ``ProgressBar.update`` to tune cursor progression (closes #88981) - - * deprecated: new DeprecationWrapper class (closes #88942) - - - -2012-03-22 -- 0.57.2 - * texutils: apply_units raise ValueError if string isn'nt valid (closes #88808) - - * daemon: don't call putenv directly - - * pytest: do not enable extra warning other than DeprecationWarning. - - * testlib: DocTestCase fix builtins pollution after doctest execution. - - * testlib: replace sys.exit with raise ImportError (closes: #84159) - - * fix license in README - - * add trove classifiers (tell about python 3 support for pypi) - - - -2011-10-28 -- 0.57.1 - * daemon: change $HOME after dropping privileges (closes #81297) - - * compat: method_type for py3k use instance of the class to have a - real instance method (closes: #79268) - - - -2011-10-12 -- 0.57.0 - * only install unittest2 when python version < 2.7 (closes: #76068) - - * daemon: make pidfile world-readable (closes #75968) - - * daemon: remove unused(?) DaemonMixin class - - * update compat module for callable() and method_type() - - * decorators: fix monkeypatch py3k compat (closes #75290) - - * decorators: provide a @cachedproperty decorator - - - -2011-09-08 -- 0.56.2 - * daemon: call initgroups/setgid before setuid (closes #74173) - - * decorators: @monkeypatch should produce a method object (closes #73920) - - * modutils: allow overriding of _getobj by suppressing mangling - - - -2011-08-05 -- 0.56.1 - * clcommands: #72450 --rc-file option doesn't work - - - -2011-06-09 -- 0.56.0 - * clcommands: make registration possible by class decoration - - * date: new datetime/delta <-> seconds/days conversion function - - * decorators: refactored @cached to allow usages such as - @cached(cacheattr='_cachename') while keeping bw compat - - - -2011-04-01 -- 0.55.2 - * new function for password generation in shellutils - - * pyro_ext: allow to create a server without registering with a pyrons - - - -2011-03-28 -- 0.55.1 - * fix date.ustrftime break if year <= 1900 - - * fix graph.py incorrectly builds command lines using %s to call dot - - * new functions to get UTC datetime / time - - - -2011-02-18 -- 0.55.0 - * new urllib2ext module providing a GSSAPI authentication handler, based on python-kerberos - - * graph: test and fix ordered_nodes() [closes #60288] - - * changelog: refactor ChangeLog class to ease overriding - - * testlib: Fix tag handling for generator. 
- - - -2011-01-12 -- 0.54.0 - * dropped python 2.3 support - - * daemon: we can now specify umask to daemonize function, and it return - different exit code according to the process - - * pyro_ext: new ns_reregister function to ensure a name is still properly - registered in the pyro name server - - * hg: new incoming/outgoing functions backward compatible with regards to - mercurial version (eg hg 1.6 and earlier) - - * testlib/pytest: more deprecation and removed code. Still on the way to - unittest2 - - - -2010-11-15 -- 0.53.0 - * first python3.x compatible release - - * __init__: tempattr context manager - - * shellutils: progress context manager - - - -2010-10-11 -- 0.52.1 - * configuration: fix pb with option names as unicode string w/ - python 2.5. Makes OptionError available through the module - - * textutils: text_to_dict skip comments (# lines) - - * compat: dropped some 2.2 compat - - * modutils: Consider arch-specific installation for STD_LIB_DIR definition - - - -2010-09-28 -- 0.52.0 - * testlib is now based on unittest2, to prepare its own extinction. - Warning are printed so you can easily migrate step by step. - - * restored python 2.3 compat in some modules, so one get a change to run - pylint at least - - * textutils: use NFKD decomposition in unormalize() - - * logging_ext: don't try to use ansi colorized formatter when not in debug - mode - - - -2010-09-10 -- 0.51.1 - * logging_ext: init_log function splitted into smaller chunk to ease reuse - in other contexts - - * clcommands: enhanced/cleaned api, nicer usage display - - * various pylint detected errors fixed - - - -2010-08-26 -- 0.51.0 - * testlib: don't raise string exception (closes #35331) - - * hg: new module regrouping some mercurial utility functions - - * clcommands: refactored to get more object oriented api. 
- - * optparser: module is now deprecated, use clcommands instead - - * textutils: new split_url_or_path and text_to_dict functions - - * logging_ext: - - init_log now accept optionaly any arbitrary handler - - threshold default to DEBUG if debug flag is true and no threshold specified - - * date: new ustrftime implementation working around datetime limitaion on dates < 1900 - - - -2010-06-04 -- 0.50.3 - * logging: added new optional kw argument to init_log rotating_parameters - - * date: fix nb_open_days() codomain, positive natural numbers are expected - - * configuration: - - skip option with no type, avoid pb with generated option such as long-help - - handle level on man page generation - - - -2010-05-21 -- 0.50.2 - * fix licensing information: LGPL v2.1 or greater - - * daemon: new daemonize function - - * modutils: fix some false negative of is_standard_module with - 'from module import something" where something isn't a submodule - - * optik_ext: fix help generation for normal optparse using script if - optik_ext has been imported (#24450) - - * textutils support 256 colors when available - - * testlib] add option splitlines to assertTextEquals - - - -2010-04-26 -- 0.50.1 - * implements __repr__ on nullobject - - * configuration: avoid crash by skipping option without 'type' - entry while input a config - - * pyro_ext: raise PyroError instead of exception - - - -2010-04-20 -- 0.50.0 - * graph: - - generate methods now takes an optional mapfile argument to generate - html image maps - - new ordered_nodes function taking a dependency graph dict as arguments - and returning an ordered list of nodes - - * configuration: - - nicer serialization of bytes / time option - - may now contains several option provider with the same name - - consider 'level' in option dict, --help displaying only option with level - 0, and automatically adding --long-help options for higher levels - - * textutils: case insensitive apply_unit - - * sphinx_ext: new module usable as a sphinx pluggin and containing a new - 'autodocstring' directive - - * ureports: output   instead of   for strict xhtml compliance - - * decorators: @cached propery copy inner function docstring - - - -2010-03-16 -- 0.49.0 - * date: new 'totime' function - - * adbh, db, sqlgen modules moved to the new logilab-database package - - * pytest: when -x option is given, stop on the first error even if - there are multiple test directories - - - -2010-02-26 -- 0.48.1 - * adbh: added dbport optional argument to [backup|restore]_commands - - * db: fix date processing for SQLServer 2005 - - * testlib: improve XML assertion by using ElementTree parser and a new 'context' lines argument - - - -2010-02-17 -- 0.48.0 - * date: fixed mx date time compat for date_range (#20651) - - * testlib: generative test should not be interrupted by self.skip() (#20648) - - - -2010-02-10 -- 0.47.0 - * adbh: changed backup / restore api (BREAKS COMPAT): - - backup_command is now backup_commands (eg return a list of commands) - - each command returned in backup_commands/restore_commands may now - be list that may be used as argument to subprocess.call, or a string - which will the requires a subshell - - new sql_rename_col method - - * deprecation: deprecated now takes an optional 'stacklevel' argument, default to 2 - - * date: some functions to ease python's datetime module usage have been backported - from cubicweb - - - -2009-12-23 -- 0.46.0 - * db / adbh: added SQL Server support using Pyodbc - - * db: - - New optional extra_args argument to get_connection. 
- - Support Windows Auth for SQLServer by giving - extra_args='Trusted_Connection' to the sqlserver2005 driver - - - -2009-11-23 -- 0.45.2 - * configuration: - - proper bytes and time option types support - - make Method usable as 'callback' value - - fix #8849 Using plugins, options and .pylintrc crashes PyLint - - * graph: fix has_path returned value to include the destination node, else we get - an empty list which makes think there is no path (test added) - - - -2009-08-26 -- 0.45.0 - * added function for parsing XML processing instructions - - - -2009-08-07 -- 0.44.0 - * remove code deprecated for a while now - - * shellutils: replace confirm function by RawInput class /ASK singleton - - * deprecation: new deprecated decorator, replacing both obsolete and deprecated_function - - - -2009-07-21 -- 0.43.0 - * dbf: a DBF reader which reads Visual Fox Pro DBF format with Memo field (module from Yusdi Santoso) - - * shellutils: - - #9764 add title to shellutils.ProgressBar - - #9796 new confirm function - - * testlib: - - simplify traceback manipulation (skip first frames corresponding to testlib functions) - - -c now captures DeprecationWarnings - - * sphinxutils: simplified API - - * modutils: new cleanup_sys_modules function that removes modules under a list - of directories from sys.modules - - - -2009-07-17 -- 0.42.0 - * pyro_ext: new module for pyro utilities - - * adbh: fix default set_null_allowed implementation, new case_sensitive - resource descriptor - - - -2009-06-03 -- 0.41.0 - * modutils: new extrapath argument to modpath_from_file (see function's - docstring for explanation) - - * adbh: new alter_column_support flag, sql_set_null_allowed and - sql_change_col_type methods - - - -2009-05-28 -- 0.40.1 - * date: handle both mx.DateTime and datetime representations - - * db: use sqlite native module's Binary, not StringIO - - - -2009-05-14 -- 0.40.0 - * python < 2.3 are now officially unsupported - - * #9162: new module with some sphinx utilities - - * #9166: use a global variable to control mx datetime / py datetime usage - - * db: add time adapter for pysqlite2, fix mysql bool and string handling - - * configuration: don't print default for store_true / store_false option - or option with None as default - - - -2009-04-07 -- 0.39.1 - * fix #6760 umessage.decode_QP() crashes on unknown encoding - - - -2009-03-25 -- 0.39.0 - * fix #7915 (shellutils unusable under windows) - - * testlib: - - * new profile option using cProfile - - * allows to skip a module by raising TestSkipped from module import - - * modutils: locate modules in zip/egg archive - - * db: USE_MX_DATETIME global to control usage of mx.DateTime / py datetime - - - -2009-01-26 -- 0.38.0 - * setuptools / easy_install support! 
- - * removed some old backward compat code - - * adbh: new intersect_all_support attribute - - * contexts: new pushd context manager - - * shellutils: enhance acquire_lock method w/ race condition - - * configuration: fix case sensitivity pb w/ config file sections - - * pytest: reimplemented colorization - - - -2009-01-08 -- 0.37.2 - * configuration: encoding handling for configuration file generation - - * adbh: fix Datetime type map for mysql - - * logging_ext: drop lldebug level which shouldn't be there - - - -2008-12-11 -- 0.37.1 - * contexts: make the module syntactically correct wrt python2.4 - - - -2008-12-09 -- 0.37.0 - * contexts: new module for context managers, keeping py <2.4 syntax compat - for distribution (only `tempdir` cm for now) - - * tasksqueue: new module containing a class to handle prioritized tasks queue - - * proc: new module for process information / resource control - - * optik_ext: new time/bytes option types, using textutils conversion function - - * logging_ext: new set_log_methods / init_log utility functions - - - -2008-10-30 -- 0.36.0 - * configuration: - - option yn is now behaving like a flag (i.e --ex : if ex.default=True and --ex in sys.args then ex.value=False) - - new attribute hide in option (i.e --ex : if --ex has 'hide':True then the option will not be displayed in man or --help) - - * pytest: - - add colors in display - - new option --restart that skips tests that succeeded on last run - - * cache: new herits from dict class - - * decorators: add @require_version @require_module that skip test if decorators are not satisfied - - - -2008-10-09 -- 0.35.3 - * graph: new has_path method - - - -2008-10-01 -- 0.35.2 - * configuration: - - fix #6011: lgc.configuration ignore customized option values - - fix #3278: man page generation broken - - * dropped context.py module which broke the debian package when - some python <2.5 is installed (#5979) - - - -2008-09-10 -- 0.35.0 - * fix #5945: wrong edge properties in graph.DotBackend - - * testlib: filter tests with tag decorator - - * shellutils: new simple unzip function - - - -2008-08-07 -- 0.34.0 - * changelog: properly adds new line at the end of each entry - - * testlib: add a with_tempdir decorator ensuring all temporary files and dirs are removed - - * graph: improve DotBackend configuration. graphiz rendered can now be selected - and additional graph parameter used - - * db: support of Decimal Type - - - -2008-06-25 -- 0.33.0 - * decorators: new @locked decorator - - * cache: make it thread safe, changed behaviour so that when cache size is 0 - and __delitem__ is called, a KeyError is raised (more consistent) - - * testlib: - - added assertIsNot, assertNone and assertNotNone assertion - - added assertUnorderedIterableEquals - - added assertDirEquals - - various failure output improvement - - * umessage: umessage.date() may return unparsable string as is instead of None - - * compat: adds a max function taking 'key' as keyword argument as in 2.5 - - * configuration: escape rest when printing for default value - - - -2008-06-08 -- 0.32.0 - * textutils: add the apply_unit function - - * testlib: - - added a assertXMLEqualsTuple test assertion - - added a assertIs assertion - - - -2008-05-08 -- 0.31.0 - * improved documentation and error messages - - * testlib: support a msg argument on more assertions, pysqlite2 as default - - * pytest: pytestconf.py for customization - - - -2008-03-26 -- 0.30.0 - * db: remember logged user on the connection - - * clcommands: commands may be hidden (e.g. 
not displayed in help), generic - ListCommandsCommand useful to build bash completion helpers - - * changelog: module to parse ChangeLog file as this one, backported from - logilab.devtools - - - -2008-03-12 -- 0.29.1 - * date: new nb_open_days function counting worked days between two date - - * adbh: add -p option to mysql commands to ask for password - - - -2008-03-05 -- 0.29.0 - * adbh: mysql doesn't support ILIKE, implement list_indices for mysql - - * db: mysql adapter use mx DateTime when available, fix unicode handling - - - -2008-02-18 -- 0.28.2 - * testlib: restore python2.3 compatibility - - - -2008-02-15 -- 0.28.1 - * testlib: introduce InnerTest class to name generative tests, fix - generative tests description storage - - * pytest: fix -s option - - * modutils: included Stefan Rank's patch to deal with 2.4 relative import - - * configuration: don't give option's keywords not recognized by optparse, - fix merge_options function - - - -2008-02-05 -- 0.28.0 - * date: new `add_days_worked` function - - * shellutils: new `chown` function - - * testlib: new `strict` argument to assertIsInstance - - * __init__: new `attrdict` and `nullobject` classes - - - -2008-01-25 -- 0.27.0 - * deprecation: new class_moved utility function - - * interface: fix subinterface handling - - - -2008-01-10 -- 0.26.1 - * optparser: support --version at main command level - - * testlib: added man page for pytest - - * textutils: fix a bug in normalize{_,_rest_}paragraph which may cause - infinite loop if an indent string containing some spaces is given - - - -2008-01-07 -- 0.26.0 - * db: binarywrap support - - * modutils: new LazyObject class - - - -2007-12-20 -- 0.25.2 - * adbh: new needs_from_clause variable on db helper - - - -2007-12-11 -- 0.25.1 - * pytest: new --profile option, setup module / teardown module hook, - other fixes and enhancements - - * db: mysql support fixes - - * adbh: fix postgres list_indices implementation - - - -2007-11-26 -- 0.25.0 - * adbh: - - list_tables implementation for sqlite - - new list_indices, create_index, drop_index methods - - * restore python < 2.4 compat - - - -2007-10-29 -- 0.24.0 - * decorators: new classproperty decorator - - * adbh: new module containing advanced db helper which were in the "db" - module, with additional registered procedures handling - - - -2007-10-23 -- 0.23.1 - * modutils: fix load_module_from_* (even with use_sys=False, it should - try to get outer packages from sys.modules) - - - -2007-10-17 -- 0.23.0 - * db: - - - mark support_users and support_groups methods as obsolete in - favor of users_support and groups_support attributes - - new ilike_support property on dbms helpers - - extended db helper api - - completed mysql support - - * textutils: new unormalize function to normalize diacritical chars by - their ascii equivalent - - * modutils: new load_module_from_file shortcut function - - * clcommands: pop_args accept None as value for expected_size_after, - meaning remaining args should not be checked - - * interface: new extend function to dynamically add an implemented interface - to a new style class - - - -2007-06-25 -- 0.22.2 - * new 'typechanged' action for configuration.read_old_config - - - -2007-05-14 -- 0.22.1 - * important bug fix in db.py - - * added history in pytest debugger sessions - - * fix pytest coverage bug - - * fix textutils test - - * fix a bug which provoked a crash if devtools was not installed - - - -2007-05-14 -- 0.22.0 - * pytest improvements - - * shellutils: use shutil.move instead of os.rename as 
default action - of mv - - * db: new `list_users` and `sql_drop_unique_constraint` methods on - advanced helpers - - * deprecation: new `obsolete` decorator - - - -2007-02-12 -- 0.21.3 - * fixed cached decorator to use __dict__ instead of attribute lookup, - avoiding potential bugs with inheritance when using cached class - methods - - - -2007-02-05 -- 0.21.2 - * fix ReST normalization (#3471) - - - -2006-12-19 -- 0.21.1 - * tree: make Node iterable (iter on its children) - - * configuration: fix #3197 (OptionsManagerMixin __init__ isn't passing - correctly its "version" argument) - - * textutils: new 'rest' argument to normalize_text to better deal with - ReST formated text - - * some packaging fixes - - - -2006-11-14 -- 0.21.0 - * db: - - - new optional keepownership argument to backup|restore_database methods - - only register mxDatetime converters on psycopg2 adapter if - mx.DateTime is available - - * moved some stuff which was in common __init__ file into specific - module. At this occasion new "decorators" and "deprecation" modules - has been added - - * deprecated fileutils.[files_by_ext,include_files_by_ext,exclude_files_by_ext] - functions in favor of new function shellutils.find - - * mark the following modules for deprecation, they will be removed in a - near version: - - * astutils: moved to astng - - * bind (never been used) - - * html: deprecated - - * logger/logservice: use logging module - - * monclient/monserver (not used anymore) - - * patricia (never been used) - - * twisted_distutils (not used anymore) - - * removed the following functions/methods which have been deprecated for a - while now: - - * modutils.load_module_from_parts - - * textutils.searchall - - * tree.Node.leafs - - * fileutils.get_by_ext, filetutils.get_mode, fileutils.ensure_mode - - * umessage: more robust charset handling - - - -2006-11-03 -- 0.20.2 - * fileutils: new remove_dead_links function - - * date: add missing strptime import - - - -2006-11-01 -- 0.20.1 - * umessage: - - new message_from_string function - - fixed get_payload encoding bug - - * db: default postgres module is now psycopg2, which has been customized - to return mx.Datetime objects for date/time related types - - - -2006-10-27 -- 0.20.0 - * db: - - fixed date handling - - new methods on advanced helper to generate backup commands - - * configuration: basic deprecated config handling support - - * new implementation of pytest - - * backport a dot backend from yams into a new "graph" module - - - -2006-10-03 -- 0.19.3 - * fixed bug in textutils.normalise_[text|paragraph] with unsplitable - word larger than the maximum line size - - * added pytest.bat for windows installation - - * changed configuration.generate_config to include None values into the - generated file - - - -2006-09-25 -- 0.19.2 - * testlib: - - fixed a bug in find_test making it returns some bad test names - - new assertIsInstance method on TestCase - - * optik_ext: make it works if mx.DateTime is not installed, in which case - the date type option won't be available - - * test fixes - - - -2006-09-22 -- 0.19.1 - * db: - - - fixed bug when querying boolean on sqlite using python's bool type - - fixed time handling and added an adapter for DateTimeDeltaType - - added "drop_on_commit" argument to create_temporary_table on db helper - - added missing implementation of executemany on pysqlite2 wrapper to - support pyargs correctly like execute - - * optik_ext: fixed "named" type option to support csv values and to return - a dictionary - - - -2006-09-05 -- 0.19.0 - * 
new umessage module which provides a class similar to the standard - email.Message class but returning unicode strings - - * new clcommands module to handle commands based command line tool - (based on the configuration module) - - * new "date" option type in optik_ext - - * new AttrObject in testlib to create objects in test with arbitrary attributes - - * add pytest to run project's tests and get rid of all runtests.py - - * add pytest option to enable design-by-contract using aspects - - * some enhancements to the configuration module - - - -2006-08-09 -- 0.18.0 - * added -c / --capture option to testlib.unittest_main - - * fixed bugs in lgc.configuration - - * optparser: added a OptionParser that extends optparse's with commands - - - -2006-07-13 -- 0.17.0 - * python2.5 compatibility (testlib.py + compat.py) - - * testlib.assertListEquals return all errors at once - - * new "password" option type in optik_ext - - * configuration: refactored to support interactive input of a configuration - - - -2006-06-08 -- 0.16.1 - * testlib: improved test collections - - * compat: added cmp argument to sorted - - - -2006-05-19 -- 0.16.0 - * testlib: - - - added a set of command line options (PYDEBUG is deprecated, - use the -i/--pdb option, and added -x/--exitfirst option) - - added support for generative tests - - * db: - - fix get_connection parameter order and host/port handling - - added .sql_temporary_table method to advanced func helpers - - started a psycopg2 adapter - - * configuration: enhanced to handle default value in help and man pages - generation (require python >= 2.4) - - - -2006-04-25 -- 0.15.1 - * db: add missing port handling to get_connection function and - dbapimodule.connect methods - - * testlib: various fixes and minor improvements - - - -2006-03-28 -- 0.15.0 - * added "cached" decorator and a simple text progression bar into __init__ - - * added a simple text progress bar into __init__ - - * configuration: fixed man page generation when using python 2.4 - - * db: added pysqllite2 support, preconfigured to handle timestamp using - mxDatetime and to correctly handle boolean types - - - -2006-03-06 -- 0.14.1 - * backported file support and add LOG_CRIT to builtin in logservice module - - - -2006-02-28 -- 0.14.0 - * renamed assertXML*Valid to assertXML*WellFormed and deprecated the old name - - * fixed modutils.load_module_from_* - - - -2006-02-03 -- 0.13.1 - * fix some tests, patch contributed by Marien Zwart - - * added ability to log into a file with make_logger() - - - -2006-01-06 -- 0.13.0 - * testlib: ability to skip a test - - * configuration: - - - cleaner configuration file generation - - refactoring so that we can have more control on file - configuration loading using read_config_file and load_config_file - instead of load_file_configuration - - * modutils: fix is_relative to return False when from_file is a file - located somewhere in sys.path - - * ureport: new "escaped" attribute on Text nodes, controling html escaping - - * compat: make set iterable and support more other set operations... 
- - * removed the astng sub-package, since it's now self-distributed as - logilab-astng - - - -2005-09-06 -- 0.12.0 - * shellutils: bug fix in mv() - - * compat: - - use set when available - - added sorted and reversed - - * table: new methods and some optimizations - - * tree: added some deprecation warnings - - - -2005-07-25 -- 0.11.0 - * db: refactoring, added sqlite support, new helpers to support DBMS - specific features - - - -2005-07-07 -- 0.10.1 - * configuration: added basic man page generation feature - - * ureports: unicode handling, some minor fixes - - * testlib: enhance MockConnection - - * python2.2 related fixes in configuration and astng - - - -2005-05-04 -- 0.10.0 - * astng: improve unit tests coverage - - * astng.astng: fix Function.format_args, new method - Function.default_value, bug fix in Node.resolve - - * astng.builder: handle classmethod and staticmethod as decorator, - handle data descriptors when building from living objects - - * ureports: - - new docbook formatter - - handle ReST like urls in the text writer - - new build_summary utility function - - - -2005-04-14 -- 0.9.3 - * optik_ext: add man page generation based on optik/optparse options - definition - - * modutils: new arguments to get_source_file to handle files without - extensions - - * astng: fix problem with the manager and python 2.2 (optik related) - - - -2005-02-16 -- 0.9.2 - * textutils: - - - added epydoc documentation - - new sep argument to the get_csv function - - fix pb with normalize_* functions on windows platforms - - * fileutils: - - - added epydoc documentation - - fixed bug in get_by_ext (renamed files_by_ext) with the - exclude_dirs argument - - * configuration: - - fixed a bug in configuration file generation on windows platforms - - better test coverage - - * fixed testlib.DocTest which wasn't working anymore with recent - versions of pyunit - - * added "context_file" argument to file_from_modpath to avoid - possible relative import problems - - * astng: use the new context_file argument from Node.resolve() - - - -2005-02-04 -- 0.9.1 - * astng: - - - remove buggy print - - fixed builder to deal with builtin methods - - fixed raw_building.build_function with python 2.4 - - * modutils: code cleanup, some reimplementation based on "imp", - better handling of windows specific extensions, epydoc documentation - - * fileutils: new exclude_dirs argument to the get_by_ext function - - * testlib: main() support -p option to run test in a profiled mode - - * generated documentation for modutils in the doc/ subdirectory - - - -2005-01-20 -- 0.9.0 - * astng: - - - refactoring of some huge methods - - fix interface resolving when __implements__ is defined in a parent - class in another module - - add special code in the builder to fix problem with qt - - new source_line method on Node - - fix sys.path during parsing to avoid some failure when trying - to get imported names by `from module import *`, and use an astng - building instead of exec'ing the statement - - fix possible AttributeError with Function.type - - manager.astng_from_file fallback to astng_from_module if possible - - * textutils: fix bug in normalize_paragraph, unquote handle empty string - correctly - - * modutils: - - - use a cache in has_module to speed up things when heavily used - - fix file_from_modpath to handle pyxml and os.path - - * configuration: fix problem with serialization/deserialization of empty - string - - - -2005-01-04 -- 0.8.0 - * modutils: a lot of fixes/rewrite on various functions to avoid - unnecessary 
imports, sys.path pollution, and other bugs (notably - making pylint reporting wrong modules name/path) - - * astng: new "inspector" module, initially taken from pyreverse code - (http://www.logilab.org/projects/pyreverse), miscellaneous bug fixes - - * configuration: new 'usage' parameter on the Configuration - initializer - - * logger: unicode support - - * fileutils: get_by_ext also ignore ".svn" directories, not only "CVS" - - - -2004-11-03 -- 0.7.1 - * astng: - - - don't raise a syntax error on files missing a trailing \n. - - fix utils.is_abstract (was causing an unexpected exception if a - string exception was raised). - - fix utils.get_implemented. - - fix file based manager's cache problem. - - * textutils: fixed normalize_text / normalize_paragraph functions - - - -2004-10-11 -- 0.7.0 - * astng: new methods on the manager, returning astng with nodes for - packages (i.e. recursive structure instead of the flat one), with - automatic lazy loading + introduction of a dict like interface to - manipulate those nodes and Module, Class and Function nodes. - - * logservice: module imported from the ginco project - - * configuration: added new classes Configuration and - OptionsManager2Configuration adapter, fix bug in loading options - from file - - * optik_ext/configuration: some new option type "multiple_choice" - - * fileutils: new ensure_mode function - - * compat: support for sum and enumerate - - - -2004-09-23 -- 0.6.0 - * db: added DBAPIAdapter - - * textutils: fix in pretty_match causing malformated messages in pylint - added ansi colorization management - - * modutils: new functions get_module_files, has_module and file_from_modpath - - * astng: some new utility functions taken from pylint, minor changes to the - manager API, Node.resolve doesn't support anymore "living" resolution, - some new methods on astng nodes - - * compat: new module for a transparent compatibility layer between - different python version (actually 2.2 vs 2.3 for now) - - - -2004-07-08 -- 0.5.2 - * astng: fix another bug in klassnode.ancestors() method... - - * db: fix mysql access - - * cli: added a space after the prompt - - - -2004-06-04 -- 0.5.1 - * astng: fix undefined var bug in klassnode.ancestors() method - - * ureports: fix attributes on title layout - - * packaging:fix the setup.py script to allow bdist_winst (well, the - generated installer has not been tested...) with the necessary - logilab/__init__.py file - - - -2004-05-10 -- 0.5.0 - * ureports: new Universal Reports sub-package - - * xmlrpcutils: new xmlrpc utilities module - - * astng: resolve(name) now handle (at least try) builtins - - * astng: fixed Class.as_string (empty parent when no base classes) - - * astng.builder: knows a little about method descriptors, Function with - unknown arguments have argnames==None. - - * fileutils: new is_binary(filename) function - - * textutils: fixed some Windows bug - - * tree: base not doesn't have the "title" attribute anymore - - * testlib: removed the spawn function (who used that ?!), added MockSMTP, - MockConfigParser, MockConnexion and DocTestCase (test class for - modules embedding doctest). All mocks objects are very basic and will be - enhanced as the need comes. 
- - * testlib: added a TestCase class with some additional methods then - the regular unittest.TestCase class - - * cli: allow specifying a command prefix by a class attributes,more - robust, print available commands on help - - * db: new "binary" function to get the binary wrapper for a given driver, - and new "system_database" function returning the system database name - for different DBMS. - - * configuration: better group control - - - -2004-02-20 -- 0.4.5 - * db: it's now possible to fix the modules search order. By default call - set_isolation_level if psycopg is used - - - -2004-02-17 -- 0.4.4 - * modutils: special case for os.path in get_module_part - - * astng: handle special case where we are on a package node importing a module - using the same name as the package, which may end in an infinite loop - on relative imports in Node.resolve - - * fileutils: new get_by_ext function - - - -2004-02-11 -- 0.4.3 - * astng: refactoring of Class.ancestor_for_* methods (now - depends on python 2.2 generators) - - * astng: make it more robust - - * configuration: more explicit exception when a bad option is - provided - - * configuration: define a short version of an option using the "short" - keyword, taking a single letter as value - - * configuration: new method global_set_option on the manager - - * testlib : allow no "suite" nor "Run" function in test modules - - * shellutils: fix bug in *mv* - - - -2003-12-23 -- 0.4.2 - * added Project class and some new methods to the ASTNGManger - - * some new functions in astng.utils - - * fixed bugs in some as_string methods - - * fixed bug in textutils.get_csv - - * fileutils.lines now take a "comments" argument, allowing to ignore - comment lines - - - -2003-11-24 -- 0.4.1 - * added missing as_string methods on astng nodes - - * bug fixes on Node.resolve - - * minor fixes in textutils and fileutils - - * better test coverage (need more !) - - - -2003-11-13 -- 0.4.0 - * new textutils and shellutils modules - - * full astng rewrite, now based on the compiler.ast package from the - standard library - - * added next_sbling and previous_sibling methods to Node - - * fix get_cycles - - - -2003-10-14 -- 0.3.5 - * fixed null size cache bug - - * added 'sort_by_column*' methods for tables - - - -2003-10-08 -- 0.3.4 - * fix bug in asntg, occurring with python2.3 and modules including an - encoding declaration - - * fix bug in astutils.get_rhs_consumed_names, occurring in lists - comprehension - - * remove debug print statement from configuration.py which caused a - generation of incorrect configuration files. 
- - - -2003-10-01 -- 0.3.3 - * fix bug in modutils.modpath_from_file - - * new module corbautils - - - -2003-09-18 -- 0.3.2 - * fix bug in modutils.load_module_from_parts - - * add missing __future__ imports - - - -2003-09-18 -- 0.3.1 - * change implementation of modutils.load_module_from_name (use find_module - and load_module instead of __import__) - - * more bug fixes in astng - - * new functions in fileutils (lines, export) and __init__ (Execute) - - - -2003-09-12 -- 0.3 - * expect "def suite" or "def Run(runner=None)" on unittest module - - * fixes in modutils - - * major fixes in astng - - * new fileutils and astutils modules - - * enhancement of the configuration module - - * new option type "named" in optik_the ext module - - - -2003-06-18 -- 0.2.2 - * astng bug fixes - - - -2003-06-04 -- 0.2.1 - * bug fixes - - * fix packaging problem - - - -2003-06-02 -- 0.2.0 - * add the interface, modutils, optik_ext and configuration modules - - * add the astng sub-package - - * miscellaneous fixes - - - -2003-04-17 -- 0.1.2 - * add the stringio module - - * minor fixes - - - -2003-02-28 -- 0.1.1 - * fix bug in tree.py - - * new file distutils_twisted - - - -2003-02-17 -- 0.1.0 - * initial revision - - - diff --git a/pymode/libs/logilab-common-1.4.1/MANIFEST.in b/pymode/libs/logilab-common-1.4.1/MANIFEST.in deleted file mode 100644 index faee190f..00000000 --- a/pymode/libs/logilab-common-1.4.1/MANIFEST.in +++ /dev/null @@ -1,14 +0,0 @@ -include ChangeLog -include README* -include COPYING -include COPYING.LESSER -include bin/logilab-pytest -include bin/logilab-pytest.bat -include test/data/ChangeLog -recursive-include test *.py *.txt *.msg *.ini *.zip *.egg -recursive-include test/data/*_dir * -recursive-include test/input *.py -recursive-include doc/html * -include doc/logilab-pytest.1 -include doc/makefile -include __pkginfo__.py diff --git a/pymode/libs/logilab-common-1.4.1/PKG-INFO b/pymode/libs/logilab-common-1.4.1/PKG-INFO deleted file mode 100644 index 9dca2cdd..00000000 --- a/pymode/libs/logilab-common-1.4.1/PKG-INFO +++ /dev/null @@ -1,164 +0,0 @@ -Metadata-Version: 1.1 -Name: logilab-common -Version: 1.4.1 -Summary: collection of low-level Python packages and modules used by Logilab projects -Home-page: http://www.logilab.org/project/logilab-common -Author: Logilab -Author-email: contact@logilab.fr -License: LGPL -Description: Logilab's common library - ======================== - - What's this ? - ------------- - - This package contains some modules used by different Logilab projects. - - It is released under the GNU Lesser General Public License. - - There is no documentation available yet but the source code should be clean and - well documented. - - Designed to ease: - - * handling command line options and configuration files - * writing interactive command line tools - * manipulation of files and character strings - * manipulation of common structures such as graph, tree, and pattern such as visitor - * generating text and HTML reports - * more... - - - Installation - ------------ - - Extract the tarball, jump into the created directory and run :: - - python setup.py install - - For installation options, see :: - - python setup.py install --help - - - Provided modules - ---------------- - - Here is a brief description of the available modules. - - Modules providing high-level features - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - * `cache`, a cache implementation with a least recently used algorithm. 
- - * `changelog`, a tiny library to manipulate our simplified ChangeLog file format. - - * `clcommands`, high-level classes to define command line programs handling - different subcommands. It is based on `configuration` to get easy command line - / configuration file handling. - - * `configuration`, some classes to handle unified configuration from both - command line (using optparse) and configuration file (using ConfigParser). - - * `proc`, interface to Linux /proc. - - * `umessage`, unicode email support. - - * `ureports`, micro-reports, a way to create simple reports using python objects - without care of the final formatting. ReST and html formatters are provided. - - - Modules providing low-level functions and structures - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - * `compat`, provides a transparent compatibility layer between different python - versions. - - * `date`, a set of date manipulation functions. - - * `daemon`, a daemon function and mix-in class to properly start an Unix daemon - process. - - * `decorators`, function decorators such as cached, timed... - - * `deprecation`, decorator, metaclass & all to mark functions / classes as - deprecated or moved - - * `fileutils`, some file / file path manipulation utilities. - - * `graph`, graph manipulations functions such as cycle detection, bases for dot - file generation. - - * `modutils`, python module manipulation functions. - - * `shellutils`, some powerful shell like functions to replace shell scripts with - python scripts. - - * `tasksqueue`, a prioritized tasks queue implementation. - - * `textutils`, some text manipulation functions (ansi colorization, line wrapping, - rest support...). - - * `tree`, base class to represent tree structure, and some others to make it - works with the visitor implementation (see below). - - * `visitor`, a generic visitor pattern implementation. - - - Modules extending some standard modules - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - * `debugger`, `pdb` customization. - - * `logging_ext`, extensions to `logging` module such as a colorized formatter - and an easier initialization function. - - * `optik_ext`, defines some new option types (regexp, csv, color, date, etc.) - for `optik` / `optparse` - - - Modules extending some external modules - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - * `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive. - - * `vcgutils` , utilities functions to generate file readable with Georg Sander's - vcg tool (Visualization of Compiler Graphs). - - - To be deprecated modules - ~~~~~~~~~~~~~~~~~~~~~~~~ - - Those `logilab.common` modules will much probably be deprecated in future - versions: - - * `testlib`: use `unittest2`_ instead - * `interface`: use `zope.interface`_ if you really want this - * `table`, `xmlutils`: is that used? - * `sphinxutils`: we won't go that way imo (i == syt) - - - Comments, support, bug reports - ------------------------------ - - Project page https://www.logilab.org/project/logilab-common - - Use the python-projects@lists.logilab.org mailing list. - - You can subscribe to this mailing list at - https://lists.logilab.org/mailman/listinfo/python-projects - - Archives are available at - https://lists.logilab.org/pipermail/python-projects/ - - - .. _Sphinx: http://sphinx.pocoo.org/ - .. _`unittest2`: http://pypi.python.org/pypi/unittest2 - .. _`discover`: http://pypi.python.org/pypi/discover - .. 
_`zope.interface`: http://pypi.python.org/pypi/zope.interface - -Platform: UNKNOWN -Classifier: Topic :: Utilities -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 diff --git a/pymode/libs/logilab-common-1.4.1/README b/pymode/libs/logilab-common-1.4.1/README deleted file mode 100644 index 21cbe78d..00000000 --- a/pymode/libs/logilab-common-1.4.1/README +++ /dev/null @@ -1,150 +0,0 @@ -Logilab's common library -======================== - -What's this ? -------------- - -This package contains some modules used by different Logilab projects. - -It is released under the GNU Lesser General Public License. - -There is no documentation available yet but the source code should be clean and -well documented. - -Designed to ease: - -* handling command line options and configuration files -* writing interactive command line tools -* manipulation of files and character strings -* manipulation of common structures such as graph, tree, and pattern such as visitor -* generating text and HTML reports -* more... - - -Installation ------------- - -Extract the tarball, jump into the created directory and run :: - - python setup.py install - -For installation options, see :: - - python setup.py install --help - - -Provided modules ----------------- - -Here is a brief description of the available modules. - -Modules providing high-level features -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -* `cache`, a cache implementation with a least recently used algorithm. - -* `changelog`, a tiny library to manipulate our simplified ChangeLog file format. - -* `clcommands`, high-level classes to define command line programs handling - different subcommands. It is based on `configuration` to get easy command line - / configuration file handling. - -* `configuration`, some classes to handle unified configuration from both - command line (using optparse) and configuration file (using ConfigParser). - -* `proc`, interface to Linux /proc. - -* `umessage`, unicode email support. - -* `ureports`, micro-reports, a way to create simple reports using python objects - without care of the final formatting. ReST and html formatters are provided. - - -Modules providing low-level functions and structures -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -* `compat`, provides a transparent compatibility layer between different python - versions. - -* `date`, a set of date manipulation functions. - -* `daemon`, a daemon function and mix-in class to properly start an Unix daemon - process. - -* `decorators`, function decorators such as cached, timed... - -* `deprecation`, decorator, metaclass & all to mark functions / classes as - deprecated or moved - -* `fileutils`, some file / file path manipulation utilities. - -* `graph`, graph manipulations functions such as cycle detection, bases for dot - file generation. - -* `modutils`, python module manipulation functions. - -* `shellutils`, some powerful shell like functions to replace shell scripts with - python scripts. - -* `tasksqueue`, a prioritized tasks queue implementation. - -* `textutils`, some text manipulation functions (ansi colorization, line wrapping, - rest support...). - -* `tree`, base class to represent tree structure, and some others to make it - works with the visitor implementation (see below). - -* `visitor`, a generic visitor pattern implementation. - - -Modules extending some standard modules -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -* `debugger`, `pdb` customization. 
- -* `logging_ext`, extensions to `logging` module such as a colorized formatter - and an easier initialization function. - -* `optik_ext`, defines some new option types (regexp, csv, color, date, etc.) - for `optik` / `optparse` - - -Modules extending some external modules -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -* `sphinx_ext`, Sphinx_ plugin defining a `autodocstring` directive. - -* `vcgutils` , utilities functions to generate file readable with Georg Sander's - vcg tool (Visualization of Compiler Graphs). - - -To be deprecated modules -~~~~~~~~~~~~~~~~~~~~~~~~ - -Those `logilab.common` modules will much probably be deprecated in future -versions: - -* `testlib`: use `unittest2`_ instead -* `interface`: use `zope.interface`_ if you really want this -* `table`, `xmlutils`: is that used? -* `sphinxutils`: we won't go that way imo (i == syt) - - -Comments, support, bug reports ------------------------------- - -Project page https://www.logilab.org/project/logilab-common - -Use the python-projects@lists.logilab.org mailing list. - -You can subscribe to this mailing list at -https://lists.logilab.org/mailman/listinfo/python-projects - -Archives are available at -https://lists.logilab.org/pipermail/python-projects/ - - -.. _Sphinx: http://sphinx.pocoo.org/ -.. _`unittest2`: http://pypi.python.org/pypi/unittest2 -.. _`discover`: http://pypi.python.org/pypi/discover -.. _`zope.interface`: http://pypi.python.org/pypi/zope.interface diff --git a/pymode/libs/logilab-common-1.4.1/__pkginfo__.py b/pymode/libs/logilab-common-1.4.1/__pkginfo__.py deleted file mode 100644 index b9f652fb..00000000 --- a/pymode/libs/logilab-common-1.4.1/__pkginfo__.py +++ /dev/null @@ -1,61 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""logilab.common packaging information""" -__docformat__ = "restructuredtext en" -import sys -import os - -distname = 'logilab-common' -modname = 'common' -subpackage_of = 'logilab' -subpackage_master = True - -numversion = (1, 4, 1) -version = '.'.join([str(num) for num in numversion]) - -license = 'LGPL' # 2.1 or later -description = "collection of low-level Python packages and modules used by Logilab projects" -web = "http://www.logilab.org/project/%s" % distname -mailinglist = "mailto://python-projects@lists.logilab.org" -author = "Logilab" -author_email = "contact@logilab.fr" - - -from os.path import join -scripts = [join('bin', 'logilab-pytest')] -include_dirs = [join('test', 'data')] - -install_requires = [ - 'setuptools', - 'six >= 1.4.0', -] -tests_require = [ - 'pytz', - 'egenix-mx-base', -] - -if sys.version_info < (2, 7): - install_requires.append('unittest2 >= 0.5.1') -if os.name == 'nt': - install_requires.append('colorama') - -classifiers = ["Topic :: Utilities", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - ] diff --git a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest deleted file mode 100755 index 42df3028..00000000 --- a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env python - -import warnings -warnings.simplefilter('default', DeprecationWarning) - -from logilab.common.pytest import run -run() diff --git a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat b/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat deleted file mode 100644 index c664e882..00000000 --- a/pymode/libs/logilab-common-1.4.1/bin/logilab-pytest.bat +++ /dev/null @@ -1,17 +0,0 @@ -@echo off -rem = """-*-Python-*- script -rem -------------------- DOS section -------------------- -rem You could set PYTHONPATH or TK environment variables here -python -x "%~f0" %* -goto exit - -""" -# -------------------- Python section -------------------- -from logilab.common.pytest import run -run() - -DosExitLabel = """ -:exit -rem """ - - diff --git a/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1 b/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1 deleted file mode 100644 index 51aec2e9..00000000 --- a/pymode/libs/logilab-common-1.4.1/doc/logilab-pytest.1 +++ /dev/null @@ -1,54 +0,0 @@ -.TH logilab-pytest "1" "January 2008" logilab-pytest -.SH NAME -.B logilab-pytest -\- run python unit tests - -.SH SYNOPSIS -usage: logilab-pytest [OPTIONS] [testfile [testpattern]] -.PP -examples: -.PP -logilab-pytest path/to/mytests.py -logilab-pytest path/to/mytests.py TheseTests -logilab-pytest path/to/mytests.py TheseTests.test_thisone -.PP -logilab-pytest one (will run both test_thisone and test_thatone) -logilab-pytest path/to/mytests.py \fB\-s\fR not (will skip test_notthisone) -.PP -logilab-pytest \fB\-\-coverage\fR test_foo.py -.IP -(only if logilab.devtools is available) -.SS "options:" -.TP -\fB\-h\fR, \fB\-\-help\fR -show this help message and exit -.TP -\fB\-t\fR TESTDIR -directory where the tests will be found -.TP -\fB\-d\fR -enable design\-by\-contract -.TP -\fB\-v\fR, \fB\-\-verbose\fR -Verbose output -.TP -\fB\-i\fR, \fB\-\-pdb\fR -Enable test failure inspection (conflicts with -\fB\-\-coverage\fR) -.TP -\fB\-x\fR, \fB\-\-exitfirst\fR -Exit on first failure (only make sense when logilab-pytest run -one test file) -.TP -\fB\-s\fR SKIPPED, \fB\-\-skip\fR=\fISKIPPED\fR -test names matching this name will be skipped to 
skip -several patterns, use commas -.TP -\fB\-q\fR, \fB\-\-quiet\fR -Minimal output -.TP -\fB\-P\fR PROFILE, \fB\-\-profile\fR=\fIPROFILE\fR -Profile execution and store data in the given file -.TP -\fB\-\-coverage\fR -run tests with pycoverage (conflicts with \fB\-\-pdb\fR) diff --git a/pymode/libs/logilab-common-1.4.1/doc/makefile b/pymode/libs/logilab-common-1.4.1/doc/makefile deleted file mode 100644 index 02f5d544..00000000 --- a/pymode/libs/logilab-common-1.4.1/doc/makefile +++ /dev/null @@ -1,8 +0,0 @@ -all: epydoc - -epydoc: - mkdir -p apidoc - -epydoc --parse-only -o apidoc --html -v --no-private --exclude='test' --exclude="__pkginfo__" --exclude="setup" -n "Logilab's common library" $(shell dirname $(CURDIR))/build/lib/logilab/common >/dev/null - -clean: - rm -rf apidoc diff --git a/pymode/libs/logilab-common-1.4.1/logilab/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/__init__.py deleted file mode 100644 index de40ea7c..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py deleted file mode 100644 index 796831a7..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/__init__.py +++ /dev/null @@ -1,184 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Logilab common library (aka Logilab's extension to the standard library). 
- -:type STD_BLACKLIST: tuple -:var STD_BLACKLIST: directories ignored by default by the functions in - this package which have to recurse into directories - -:type IGNORED_EXTENSIONS: tuple -:var IGNORED_EXTENSIONS: file extensions that may usually be ignored -""" -__docformat__ = "restructuredtext en" - -import sys -import types -import pkg_resources - -__version__ = pkg_resources.get_distribution('logilab-common').version - -# deprecated, but keep compatibility with pylint < 1.4.4 -__pkginfo__ = types.ModuleType('__pkginfo__') -__pkginfo__.__package__ = __name__ -__pkginfo__.version = __version__ -sys.modules['logilab.common.__pkginfo__'] = __pkginfo__ - -STD_BLACKLIST = ('CVS', '.svn', '.hg', '.git', '.tox', 'debian', 'dist', 'build') - -IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig') - -# set this to False if you've mx DateTime installed but you don't want your db -# adapter to use it (should be set before you got a connection) -USE_MX_DATETIME = True - - -class attrdict(dict): - """A dictionary for which keys are also accessible as attributes.""" - def __getattr__(self, attr): - try: - return self[attr] - except KeyError: - raise AttributeError(attr) - -class dictattr(dict): - def __init__(self, proxy): - self.__proxy = proxy - - def __getitem__(self, attr): - try: - return getattr(self.__proxy, attr) - except AttributeError: - raise KeyError(attr) - -class nullobject(object): - def __repr__(self): - return '' - def __bool__(self): - return False - __nonzero__ = __bool__ - -class tempattr(object): - def __init__(self, obj, attr, value): - self.obj = obj - self.attr = attr - self.value = value - - def __enter__(self): - self.oldvalue = getattr(self.obj, self.attr) - setattr(self.obj, self.attr, self.value) - return self.obj - - def __exit__(self, exctype, value, traceback): - setattr(self.obj, self.attr, self.oldvalue) - - - -# flatten ----- -# XXX move in a specific module and use yield instead -# do not mix flatten and translate -# -# def iterable(obj): -# try: iter(obj) -# except: return False -# return True -# -# def is_string_like(obj): -# try: obj +'' -# except (TypeError, ValueError): return False -# return True -# -#def is_scalar(obj): -# return is_string_like(obj) or not iterable(obj) -# -#def flatten(seq): -# for item in seq: -# if is_scalar(item): -# yield item -# else: -# for subitem in flatten(item): -# yield subitem - -def flatten(iterable, tr_func=None, results=None): - """Flatten a list of list with any level. - - If tr_func is not None, it should be a one argument function that'll be called - on each final element. - - :rtype: list - - >>> flatten([1, [2, 3]]) - [1, 2, 3] - """ - if results is None: - results = [] - for val in iterable: - if isinstance(val, (list, tuple)): - flatten(val, tr_func, results) - elif tr_func is None: - results.append(val) - else: - results.append(tr_func(val)) - return results - - -# XXX is function below still used ? - -def make_domains(lists): - """ - Given a list of lists, return a list of domain for each list to produce all - combinations of possibles values. 
- - :rtype: list - - Example: - - >>> make_domains(['a', 'b'], ['c','d', 'e']) - [['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']] - """ - from six.moves import range - domains = [] - for iterable in lists: - new_domain = iterable[:] - for i in range(len(domains)): - domains[i] = domains[i]*len(iterable) - if domains: - missing = (len(domains[0]) - len(iterable)) / len(iterable) - i = 0 - for j in range(len(iterable)): - value = iterable[j] - for dummy in range(missing): - new_domain.insert(i, value) - i += 1 - i += 1 - domains.append(new_domain) - return domains - - -# private stuff ################################################################ - -def _handle_blacklist(blacklist, dirnames, filenames): - """remove files/directories in the black list - - dirnames/filenames are usually from os.walk - """ - for norecurs in blacklist: - if norecurs in dirnames: - dirnames.remove(norecurs) - elif norecurs in filenames: - filenames.remove(norecurs) - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/cache.py b/pymode/libs/logilab-common-1.4.1/logilab/common/cache.py deleted file mode 100644 index 11ed1370..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/cache.py +++ /dev/null @@ -1,114 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Cache module, with a least recently used algorithm for the management of the -deletion of entries. - - - - -""" -__docformat__ = "restructuredtext en" - -from threading import Lock - -from logilab.common.decorators import locked - -_marker = object() - -class Cache(dict): - """A dictionary like cache. - - inv: - len(self._usage) <= self.size - len(self.data) <= self.size - """ - - def __init__(self, size=100): - """ Warning : Cache.__init__() != dict.__init__(). - Constructor does not take any arguments beside size. 
- """ - assert size >= 0, 'cache size must be >= 0 (0 meaning no caching)' - self.size = size - self._usage = [] - self._lock = Lock() - super(Cache, self).__init__() - - def _acquire(self): - self._lock.acquire() - - def _release(self): - self._lock.release() - - def _update_usage(self, key): - if not self._usage: - self._usage.append(key) - elif self._usage[-1] != key: - try: - self._usage.remove(key) - except ValueError: - # we are inserting a new key - # check the size of the dictionary - # and remove the oldest item in the cache - if self.size and len(self._usage) >= self.size: - super(Cache, self).__delitem__(self._usage[0]) - del self._usage[0] - self._usage.append(key) - else: - pass # key is already the most recently used key - - def __getitem__(self, key): - value = super(Cache, self).__getitem__(key) - self._update_usage(key) - return value - __getitem__ = locked(_acquire, _release)(__getitem__) - - def __setitem__(self, key, item): - # Just make sure that size > 0 before inserting a new item in the cache - if self.size > 0: - super(Cache, self).__setitem__(key, item) - self._update_usage(key) - __setitem__ = locked(_acquire, _release)(__setitem__) - - def __delitem__(self, key): - super(Cache, self).__delitem__(key) - self._usage.remove(key) - __delitem__ = locked(_acquire, _release)(__delitem__) - - def clear(self): - super(Cache, self).clear() - self._usage = [] - clear = locked(_acquire, _release)(clear) - - def pop(self, key, default=_marker): - if key in self: - self._usage.remove(key) - #if default is _marker: - # return super(Cache, self).pop(key) - return super(Cache, self).pop(key, default) - pop = locked(_acquire, _release)(pop) - - def popitem(self): - raise NotImplementedError() - - def setdefault(self, key, default=None): - raise NotImplementedError() - - def update(self, other): - raise NotImplementedError() - - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py b/pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py deleted file mode 100644 index 3f62bd4c..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/changelog.py +++ /dev/null @@ -1,249 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with logilab-common. If not, see . -"""Manipulation of upstream change log files. - -The upstream change log files format handled is simpler than the one -often used such as those generated by the default Emacs changelog mode. - -Sample ChangeLog format:: - - Change log for project Yoo - ========================== - - -- - * add a new functionality - - 2002-02-01 -- 0.1.1 - * fix bug #435454 - * fix bug #434356 - - 2002-01-01 -- 0.1 - * initial release - - -There is 3 entries in this change log, one for each released version and one -for the next version (i.e. the current entry). 
-Each entry contains a set of messages corresponding to changes done in this -release. -All the non empty lines before the first entry are considered as the change -log title. -""" - -__docformat__ = "restructuredtext en" - -import sys -from stat import S_IWRITE -import codecs - -from six import string_types - -BULLET = '*' -SUBBULLET = '-' -INDENT = ' ' * 4 - - -class NoEntry(Exception): - """raised when we are unable to find an entry""" - - -class EntryNotFound(Exception): - """raised when we are unable to find a given entry""" - - -class Version(tuple): - """simple class to handle soft version number has a tuple while - correctly printing it as X.Y.Z - """ - def __new__(cls, versionstr): - if isinstance(versionstr, string_types): - versionstr = versionstr.strip(' :') # XXX (syt) duh? - parsed = cls.parse(versionstr) - else: - parsed = versionstr - return tuple.__new__(cls, parsed) - - @classmethod - def parse(cls, versionstr): - versionstr = versionstr.strip(' :') - try: - return [int(i) for i in versionstr.split('.')] - except ValueError as ex: - raise ValueError("invalid literal for version '%s' (%s)" % - (versionstr, ex)) - - def __str__(self): - return '.'.join([str(i) for i in self]) - - -# upstream change log ######################################################### - -class ChangeLogEntry(object): - """a change log entry, i.e. a set of messages associated to a version and - its release date - """ - version_class = Version - - def __init__(self, date=None, version=None, **kwargs): - self.__dict__.update(kwargs) - if version: - self.version = self.version_class(version) - else: - self.version = None - self.date = date - self.messages = [] - - def add_message(self, msg): - """add a new message""" - self.messages.append(([msg], [])) - - def complete_latest_message(self, msg_suite): - """complete the latest added message - """ - if not self.messages: - raise ValueError('unable to complete last message as ' - 'there is no previous message)') - if self.messages[-1][1]: # sub messages - self.messages[-1][1][-1].append(msg_suite) - else: # message - self.messages[-1][0].append(msg_suite) - - def add_sub_message(self, sub_msg, key=None): - if not self.messages: - raise ValueError('unable to complete last message as ' - 'there is no previous message)') - if key is None: - self.messages[-1][1].append([sub_msg]) - else: - raise NotImplementedError('sub message to specific key ' - 'are not implemented yet') - - def write(self, stream=sys.stdout): - """write the entry to file """ - stream.write(u'%s -- %s\n' % (self.date or '', self.version or '')) - for msg, sub_msgs in self.messages: - stream.write(u'%s%s %s\n' % (INDENT, BULLET, msg[0])) - stream.write(u''.join(msg[1:])) - if sub_msgs: - stream.write(u'\n') - for sub_msg in sub_msgs: - stream.write(u'%s%s %s\n' % - (INDENT * 2, SUBBULLET, sub_msg[0])) - stream.write(u''.join(sub_msg[1:])) - stream.write(u'\n') - - stream.write(u'\n\n') - - -class ChangeLog(object): - """object representation of a whole ChangeLog file""" - - entry_class = ChangeLogEntry - - def __init__(self, changelog_file, title=u''): - self.file = changelog_file - assert isinstance(title, type(u'')), 'title must be a unicode object' - self.title = title - self.additional_content = u'' - self.entries = [] - self.load() - - def __repr__(self): - return '' % (self.file, id(self), - len(self.entries)) - - def add_entry(self, entry): - """add a new entry to the change log""" - self.entries.append(entry) - - def get_entry(self, version='', create=None): - """ return a given 
changelog entry - if version is omitted, return the current entry - """ - if not self.entries: - if version or not create: - raise NoEntry() - self.entries.append(self.entry_class()) - if not version: - if self.entries[0].version and create is not None: - self.entries.insert(0, self.entry_class()) - return self.entries[0] - version = self.version_class(version) - for entry in self.entries: - if entry.version == version: - return entry - raise EntryNotFound() - - def add(self, msg, create=None): - """add a new message to the latest opened entry""" - entry = self.get_entry(create=create) - entry.add_message(msg) - - def load(self): - """ read a logilab's ChangeLog from file """ - try: - stream = codecs.open(self.file, encoding='utf-8') - except IOError: - return - last = None - expect_sub = False - for line in stream: - sline = line.strip() - words = sline.split() - # if new entry - if len(words) == 1 and words[0] == '--': - expect_sub = False - last = self.entry_class() - self.add_entry(last) - # if old entry - elif len(words) == 3 and words[1] == '--': - expect_sub = False - last = self.entry_class(words[0], words[2]) - self.add_entry(last) - # if title - elif sline and last is None: - self.title = '%s%s' % (self.title, line) - # if new entry - elif sline and sline[0] == BULLET: - expect_sub = False - last.add_message(sline[1:].strip()) - # if new sub_entry - elif expect_sub and sline and sline[0] == SUBBULLET: - last.add_sub_message(sline[1:].strip()) - # if new line for current entry - elif sline and last.messages: - last.complete_latest_message(line) - else: - expect_sub = True - self.additional_content += line - stream.close() - - def format_title(self): - return u'%s\n\n' % self.title.strip() - - def save(self): - """write back change log""" - # filetutils isn't importable in appengine, so import locally - from logilab.common.fileutils import ensure_fs_mode - ensure_fs_mode(self.file, S_IWRITE) - self.write(codecs.open(self.file, 'w', encoding='utf-8')) - - def write(self, stream=sys.stdout): - """write changelog to stream""" - stream.write(self.format_title()) - for entry in self.entries: - entry.write(stream) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py b/pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py deleted file mode 100644 index 4778b99b..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/clcommands.py +++ /dev/null @@ -1,334 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Helper functions to support command line tools providing more than -one command. - -e.g called as "tool command [options] args..." 
where and are -command'specific -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import logging -from os.path import basename - -from logilab.common.configuration import Configuration -from logilab.common.logging_ext import init_log, get_threshold -from logilab.common.deprecation import deprecated - - -class BadCommandUsage(Exception): - """Raised when an unknown command is used or when a command is not - correctly used (bad options, too much / missing arguments...). - - Trigger display of command usage. - """ - -class CommandError(Exception): - """Raised when a command can't be processed and we want to display it and - exit, without traceback nor usage displayed. - """ - - -# command line access point #################################################### - -class CommandLine(dict): - """Usage: - - >>> LDI = cli.CommandLine('ldi', doc='Logilab debian installer', - version=version, rcfile=RCFILE) - >>> LDI.register(MyCommandClass) - >>> LDI.register(MyOtherCommandClass) - >>> LDI.run(sys.argv[1:]) - - Arguments: - - * `pgm`, the program name, default to `basename(sys.argv[0])` - - * `doc`, a short description of the command line tool - - * `copyright`, additional doc string that will be appended to the generated - doc - - * `version`, version number of string of the tool. If specified, global - --version option will be available. - - * `rcfile`, path to a configuration file. If specified, global --C/--rc-file - option will be available? self.rcfile = rcfile - - * `logger`, logger to propagate to commands, default to - `logging.getLogger(self.pgm))` - """ - def __init__(self, pgm=None, doc=None, copyright=None, version=None, - rcfile=None, logthreshold=logging.ERROR, - check_duplicated_command=True): - if pgm is None: - pgm = basename(sys.argv[0]) - self.pgm = pgm - self.doc = doc - self.copyright = copyright - self.version = version - self.rcfile = rcfile - self.logger = None - self.logthreshold = logthreshold - self.check_duplicated_command = check_duplicated_command - - def register(self, cls, force=False): - """register the given :class:`Command` subclass""" - assert not self.check_duplicated_command or force or not cls.name in self, \ - 'a command %s is already defined' % cls.name - self[cls.name] = cls - return cls - - def run(self, args): - """main command line access point: - * init logging - * handle global options (-h/--help, --version, -C/--rc-file) - * check command - * run command - - Terminate by :exc:`SystemExit` - """ - init_log(debug=True, # so that we use StreamHandler - logthreshold=self.logthreshold, - logformat='%(levelname)s: %(message)s') - try: - arg = args.pop(0) - except IndexError: - self.usage_and_exit(1) - if arg in ('-h', '--help'): - self.usage_and_exit(0) - if self.version is not None and arg in ('--version'): - print(self.version) - sys.exit(0) - rcfile = self.rcfile - if rcfile is not None and arg in ('-C', '--rc-file'): - try: - rcfile = args.pop(0) - arg = args.pop(0) - except IndexError: - self.usage_and_exit(1) - try: - command = self.get_command(arg) - except KeyError: - print('ERROR: no %s command' % arg) - print() - self.usage_and_exit(1) - try: - sys.exit(command.main_run(args, rcfile)) - except KeyboardInterrupt as exc: - print('Interrupted', end=' ') - if str(exc): - print(': %s' % exc, end=' ') - print() - sys.exit(4) - except BadCommandUsage as err: - print('ERROR:', err) - print() - print(command.help()) - sys.exit(1) - - def create_logger(self, handler, logthreshold=None): - logger = 
logging.Logger(self.pgm) - logger.handlers = [handler] - if logthreshold is None: - logthreshold = get_threshold(self.logthreshold) - logger.setLevel(logthreshold) - return logger - - def get_command(self, cmd, logger=None): - if logger is None: - logger = self.logger - if logger is None: - logger = self.logger = logging.getLogger(self.pgm) - logger.setLevel(get_threshold(self.logthreshold)) - return self[cmd](logger) - - def usage(self): - """display usage for the main program (i.e. when no command supplied) - and exit - """ - print('usage:', self.pgm, end=' ') - if self.rcfile: - print('[--rc-file=]', end=' ') - print(' [options] ...') - if self.doc: - print('\n%s' % self.doc) - print(''' -Type "%(pgm)s --help" for more information about a specific -command. Available commands are :\n''' % self.__dict__) - max_len = max([len(cmd) for cmd in self]) - padding = ' ' * max_len - for cmdname, cmd in sorted(self.items()): - if not cmd.hidden: - print(' ', (cmdname + padding)[:max_len], cmd.short_description()) - if self.rcfile: - print(''' -Use --rc-file= / -C before the command -to specify a configuration file. Default to %s. -''' % self.rcfile) - print('''%(pgm)s -h/--help - display this usage information and exit''' % self.__dict__) - if self.version: - print('''%(pgm)s -v/--version - display version configuration and exit''' % self.__dict__) - if self.copyright: - print('\n', self.copyright) - - def usage_and_exit(self, status): - self.usage() - sys.exit(status) - - -# base command classes ######################################################### - -class Command(Configuration): - """Base class for command line commands. - - Class attributes: - - * `name`, the name of the command - - * `min_args`, minimum number of arguments, None if unspecified - - * `max_args`, maximum number of arguments, None if unspecified - - * `arguments`, string describing arguments, used in command usage - - * `hidden`, boolean flag telling if the command should be hidden, e.g. does - not appear in help's commands list - - * `options`, options list, as allowed by :mod:configuration - """ - - arguments = '' - name = '' - # hidden from help ? - hidden = False - # max/min args, None meaning unspecified - min_args = None - max_args = None - - @classmethod - def description(cls): - return cls.__doc__.replace(' ', '') - - @classmethod - def short_description(cls): - return cls.description().split('.')[0] - - def __init__(self, logger): - usage = '%%prog %s %s\n\n%s' % (self.name, self.arguments, - self.description()) - Configuration.__init__(self, usage=usage) - self.logger = logger - - def check_args(self, args): - """check command's arguments are provided""" - if self.min_args is not None and len(args) < self.min_args: - raise BadCommandUsage('missing argument') - if self.max_args is not None and len(args) > self.max_args: - raise BadCommandUsage('too many arguments') - - def main_run(self, args, rcfile=None): - """Run the command and return status 0 if everything went fine. - - If :exc:`CommandError` is raised by the underlying command, simply log - the error and return status 2. - - Any other exceptions, including :exc:`BadCommandUsage` will be - propagated. 
- """ - if rcfile: - self.load_file_configuration(rcfile) - args = self.load_command_line_configuration(args) - try: - self.check_args(args) - self.run(args) - except CommandError as err: - self.logger.error(err) - return 2 - return 0 - - def run(self, args): - """run the command with its specific arguments""" - raise NotImplementedError() - - -class ListCommandsCommand(Command): - """list available commands, useful for bash completion.""" - name = 'listcommands' - arguments = '[command]' - hidden = True - - def run(self, args): - """run the command with its specific arguments""" - if args: - command = args.pop() - cmd = _COMMANDS[command] - for optname, optdict in cmd.options: - print('--help') - print('--' + optname) - else: - commands = sorted(_COMMANDS.keys()) - for command in commands: - cmd = _COMMANDS[command] - if not cmd.hidden: - print(command) - - -# deprecated stuff ############################################################# - -_COMMANDS = CommandLine() - -DEFAULT_COPYRIGHT = '''\ -Copyright (c) 2004-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - -@deprecated('use cls.register(cli)') -def register_commands(commands): - """register existing commands""" - for command_klass in commands: - _COMMANDS.register(command_klass) - -@deprecated('use args.pop(0)') -def main_run(args, doc=None, copyright=None, version=None): - """command line tool: run command specified by argument list (without the - program name). Raise SystemExit with status 0 if everything went fine. - - >>> main_run(sys.argv[1:]) - """ - _COMMANDS.doc = doc - _COMMANDS.copyright = copyright - _COMMANDS.version = version - _COMMANDS.run(args) - -@deprecated('use args.pop(0)') -def pop_arg(args_list, expected_size_after=None, msg="Missing argument"): - """helper function to get and check command line arguments""" - try: - value = args_list.pop(0) - except IndexError: - raise BadCommandUsage(msg) - if expected_size_after is not None and len(args_list) > expected_size_after: - raise BadCommandUsage('too many arguments') - return value - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/compat.py b/pymode/libs/logilab-common-1.4.1/logilab/common/compat.py deleted file mode 100644 index f2eb5905..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/compat.py +++ /dev/null @@ -1,78 +0,0 @@ -# pylint: disable=E0601,W0622,W0611 -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Wrappers around some builtins introduced in python 2.3, 2.4 and -2.5, making them available in for earlier versions of python. 
- -See another compatibility snippets from other projects: - - :mod:`lib2to3.fixes` - :mod:`coverage.backward` - :mod:`unittest2.compatibility` -""" - - -__docformat__ = "restructuredtext en" - -import os -import sys -import types -from warnings import warn - -# not used here, but imported to preserve API -from six.moves import builtins - -if sys.version_info < (3, 0): - str_to_bytes = str - def str_encode(string, encoding): - if isinstance(string, unicode): - return string.encode(encoding) - return str(string) -else: - def str_to_bytes(string): - return str.encode(string) - # we have to ignore the encoding in py3k to be able to write a string into a - # TextIOWrapper or like object (which expect an unicode string) - def str_encode(string, encoding): - return str(string) - -# See also http://bugs.python.org/issue11776 -if sys.version_info[0] == 3: - def method_type(callable, instance, klass): - # api change. klass is no more considered - return types.MethodType(callable, instance) -else: - # alias types otherwise - method_type = types.MethodType - -# Pythons 2 and 3 differ on where to get StringIO -if sys.version_info < (3, 0): - from cStringIO import StringIO - FileIO = file - BytesIO = StringIO - reload = reload -else: - from io import FileIO, BytesIO, StringIO - from imp import reload - -from logilab.common.deprecation import deprecated - -# Other projects import these from here, keep providing them for -# backwards compat -any = deprecated('use builtin "any"')(any) -all = deprecated('use builtin "all"')(all) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py b/pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py deleted file mode 100644 index 7a54f1af..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/configuration.py +++ /dev/null @@ -1,1108 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Classes to handle advanced configuration in simple to complex applications. - -Allows to load the configuration from a file or from command line -options, to generate a sample configuration file or to display -program's usage. Fills the gap between optik/optparse and ConfigParser -by adding data types (which are also available as a standalone optik -extension in the `optik_ext` module). - - -Quick start: simplest usage ---------------------------- - -.. python :: - - >>> import sys - >>> from logilab.common.configuration import Configuration - >>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': ''}), - ... ('value', {'type': 'string', 'metavar': ''}), - ... ('multiple', {'type': 'csv', 'default': ('yop',), - ... 'metavar': '', - ... 'help': 'you can also document the option'}), - ... ('number', {'type': 'int', 'default':2, 'metavar':''}), - ... 
] - >>> config = Configuration(options=options, name='My config') - >>> print config['dothis'] - True - >>> print config['value'] - None - >>> print config['multiple'] - ('yop',) - >>> print config['number'] - 2 - >>> print config.help() - Usage: [options] - - Options: - -h, --help show this help message and exit - --dothis= - --value= - --multiple= - you can also document the option [current: none] - --number= - - >>> f = open('myconfig.ini', 'w') - >>> f.write('''[MY CONFIG] - ... number = 3 - ... dothis = no - ... multiple = 1,2,3 - ... ''') - >>> f.close() - >>> config.load_file_configuration('myconfig.ini') - >>> print config['dothis'] - False - >>> print config['value'] - None - >>> print config['multiple'] - ['1', '2', '3'] - >>> print config['number'] - 3 - >>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6', - ... 'nonoptionargument'] - >>> print config.load_command_line_configuration() - ['nonoptionargument'] - >>> print config['value'] - bacon - >>> config.generate_config() - # class for simple configurations which don't need the - # manager / providers model and prefer delegation to inheritance - # - # configuration values are accessible through a dict like interface - # - [MY CONFIG] - - dothis=no - - value=bacon - - # you can also document the option - multiple=4,5,6 - - number=3 - - Note : starting with Python 2.7 ConfigParser is able to take into - account the order of occurrences of the options into a file (by - using an OrderedDict). If you have two options changing some common - state, like a 'disable-all-stuff' and a 'enable-some-stuff-a', their - order of appearance will be significant : the last specified in the - file wins. For earlier version of python and logilab.common newer - than 0.61 the behaviour is unspecified. 
- -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn', - 'ConfigurationMixIn', 'Configuration', - 'OptionsManager2ConfigurationAdapter') - -import os -import sys -import re -from os.path import exists, expanduser -from copy import copy -from warnings import warn - -from six import integer_types, string_types -from six.moves import range, configparser as cp, input - -from logilab.common.compat import str_encode as _encode -from logilab.common.deprecation import deprecated -from logilab.common.textutils import normalize_text, unquote -from logilab.common import optik_ext - -OptionError = optik_ext.OptionError - -REQUIRED = [] - -class UnsupportedAction(Exception): - """raised by set_option when it doesn't know what to do for an action""" - - -def _get_encoding(encoding, stream): - encoding = encoding or getattr(stream, 'encoding', None) - if not encoding: - import locale - encoding = locale.getpreferredencoding() - return encoding - - -# validation functions ######################################################## - -# validators will return the validated value or raise optparse.OptionValueError -# XXX add to documentation - -def choice_validator(optdict, name, value): - """validate and return a converted value for option of type 'choice' - """ - if not value in optdict['choices']: - msg = "option %s: invalid value: %r, should be in %s" - raise optik_ext.OptionValueError(msg % (name, value, optdict['choices'])) - return value - -def multiple_choice_validator(optdict, name, value): - """validate and return a converted value for option of type 'choice' - """ - choices = optdict['choices'] - values = optik_ext.check_csv(None, name, value) - for value in values: - if not value in choices: - msg = "option %s: invalid value: %r, should be in %s" - raise optik_ext.OptionValueError(msg % (name, value, choices)) - return values - -def csv_validator(optdict, name, value): - """validate and return a converted value for option of type 'csv' - """ - return optik_ext.check_csv(None, name, value) - -def yn_validator(optdict, name, value): - """validate and return a converted value for option of type 'yn' - """ - return optik_ext.check_yn(None, name, value) - -def named_validator(optdict, name, value): - """validate and return a converted value for option of type 'named' - """ - return optik_ext.check_named(None, name, value) - -def file_validator(optdict, name, value): - """validate and return a filepath for option of type 'file'""" - return optik_ext.check_file(None, name, value) - -def color_validator(optdict, name, value): - """validate and return a valid color for option of type 'color'""" - return optik_ext.check_color(None, name, value) - -def password_validator(optdict, name, value): - """validate and return a string for option of type 'password'""" - return optik_ext.check_password(None, name, value) - -def date_validator(optdict, name, value): - """validate and return a mx DateTime object for option of type 'date'""" - return optik_ext.check_date(None, name, value) - -def time_validator(optdict, name, value): - """validate and return a time object for option of type 'time'""" - return optik_ext.check_time(None, name, value) - -def bytes_validator(optdict, name, value): - """validate and return an integer for option of type 'bytes'""" - return optik_ext.check_bytes(None, name, value) - - -VALIDATORS = {'string': unquote, - 'int': int, - 'float': float, - 'file': file_validator, - 'font': unquote, - 
'color': color_validator, - 'regexp': re.compile, - 'csv': csv_validator, - 'yn': yn_validator, - 'bool': yn_validator, - 'named': named_validator, - 'password': password_validator, - 'date': date_validator, - 'time': time_validator, - 'bytes': bytes_validator, - 'choice': choice_validator, - 'multiple_choice': multiple_choice_validator, - } - -def _call_validator(opttype, optdict, option, value): - if opttype not in VALIDATORS: - raise Exception('Unsupported type "%s"' % opttype) - try: - return VALIDATORS[opttype](optdict, option, value) - except TypeError: - try: - return VALIDATORS[opttype](value) - except optik_ext.OptionValueError: - raise - except: - raise optik_ext.OptionValueError('%s value (%r) should be of type %s' % - (option, value, opttype)) - -# user input functions ######################################################## - -# user input functions will ask the user for input on stdin then validate -# the result and return the validated value or raise optparse.OptionValueError -# XXX add to documentation - -def input_password(optdict, question='password:'): - from getpass import getpass - while True: - value = getpass(question) - value2 = getpass('confirm: ') - if value == value2: - return value - print('password mismatch, try again') - -def input_string(optdict, question): - value = input(question).strip() - return value or None - -def _make_input_function(opttype): - def input_validator(optdict, question): - while True: - value = input(question) - if not value.strip(): - return None - try: - return _call_validator(opttype, optdict, None, value) - except optik_ext.OptionValueError as ex: - msg = str(ex).split(':', 1)[-1].strip() - print('bad value: %s' % msg) - return input_validator - -INPUT_FUNCTIONS = { - 'string': input_string, - 'password': input_password, - } - -for opttype in VALIDATORS.keys(): - INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype)) - -# utility functions ############################################################ - -def expand_default(self, option): - """monkey patch OptionParser.expand_default since we have a particular - way to handle defaults to avoid overriding values in the configuration - file - """ - if self.parser is None or not self.default_tag: - return option.help - optname = option._long_opts[0][2:] - try: - provider = self.parser.options_manager._all_options[optname] - except KeyError: - value = None - else: - optdict = provider.get_option_def(optname) - optname = provider.option_attrname(optname, optdict) - value = getattr(provider.config, optname, optdict) - value = format_option_value(optdict, value) - if value is optik_ext.NO_DEFAULT or not value: - value = self.NO_DEFAULT_VALUE - return option.help.replace(self.default_tag, str(value)) - - -def _validate(value, optdict, name=''): - """return a validated value for an option according to its type - - optional argument name is only used for error message formatting - """ - try: - _type = optdict['type'] - except KeyError: - # FIXME - return value - return _call_validator(_type, optdict, name, value) -convert = deprecated('[0.60] convert() was renamed _validate()')(_validate) - -# format and output functions ################################################## - -def comment(string): - """return string as a comment""" - lines = [line.strip() for line in string.splitlines()] - return '# ' + ('%s# ' % os.linesep).join(lines) - -def format_time(value): - if not value: - return '0' - if value != int(value): - return '%.2fs' % value - value = int(value) - nbmin, nbsec = 
divmod(value, 60) - if nbsec: - return '%ss' % value - nbhour, nbmin_ = divmod(nbmin, 60) - if nbmin_: - return '%smin' % nbmin - nbday, nbhour_ = divmod(nbhour, 24) - if nbhour_: - return '%sh' % nbhour - return '%sd' % nbday - -def format_bytes(value): - if not value: - return '0' - if value != int(value): - return '%.2fB' % value - value = int(value) - prevunit = 'B' - for unit in ('KB', 'MB', 'GB', 'TB'): - next, remain = divmod(value, 1024) - if remain: - return '%s%s' % (value, prevunit) - prevunit = unit - value = next - return '%s%s' % (value, unit) - -def format_option_value(optdict, value): - """return the user input's value from a 'compiled' value""" - if isinstance(value, (list, tuple)): - value = ','.join(value) - elif isinstance(value, dict): - value = ','.join(['%s:%s' % (k, v) for k, v in value.items()]) - elif hasattr(value, 'match'): # optdict.get('type') == 'regexp' - # compiled regexp - value = value.pattern - elif optdict.get('type') == 'yn': - value = value and 'yes' or 'no' - elif isinstance(value, string_types) and value.isspace(): - value = "'%s'" % value - elif optdict.get('type') == 'time' and isinstance(value, (float, ) + integer_types): - value = format_time(value) - elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'): - value = format_bytes(value) - return value - -def ini_format_section(stream, section, options, encoding=None, doc=None): - """format an options section using the INI format""" - encoding = _get_encoding(encoding, stream) - if doc: - print(_encode(comment(doc), encoding), file=stream) - print('[%s]' % section, file=stream) - ini_format(stream, options, encoding) - -def ini_format(stream, options, encoding): - """format options using the INI format""" - for optname, optdict, value in options: - value = format_option_value(optdict, value) - help = optdict.get('help') - if help: - help = normalize_text(help, line_len=79, indent='# ') - print(file=stream) - print(_encode(help, encoding), file=stream) - else: - print(file=stream) - if value is None: - print('#%s=' % optname, file=stream) - else: - value = _encode(value, encoding).strip() - if optdict.get('type') == 'string' and '\n' in value: - prefix = '\n ' - value = prefix + prefix.join(value.split('\n')) - print('%s=%s' % (optname, value), file=stream) - -format_section = ini_format_section - -def rest_format_section(stream, section, options, encoding=None, doc=None): - """format an options section using as ReST formatted output""" - encoding = _get_encoding(encoding, stream) - if section: - print('%s\n%s' % (section, "'"*len(section)), file=stream) - if doc: - print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream) - print(file=stream) - for optname, optdict, value in options: - help = optdict.get('help') - print(':%s:' % optname, file=stream) - if help: - help = normalize_text(help, line_len=79, indent=' ') - print(_encode(help, encoding), file=stream) - if value: - value = _encode(format_option_value(optdict, value), encoding) - print(file=stream) - print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream) - -# Options Manager ############################################################## - -class OptionsManagerMixIn(object): - """MixIn to handle a configuration from both a configuration file and - command line options - """ - - def __init__(self, usage, config_file=None, version=None, quiet=0): - self.config_file = config_file - self.reset_parsers(usage, version=version) - # list of registered options providers - self.options_providers = 
[] - # dictionary associating option name to checker - self._all_options = {} - self._short_options = {} - self._nocallback_options = {} - self._mygroups = dict() - # verbosity - self.quiet = quiet - self._maxlevel = 0 - - def reset_parsers(self, usage='', version=None): - # configuration file parser - self.cfgfile_parser = cp.ConfigParser() - # command line parser - self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version) - self.cmdline_parser.options_manager = self - self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS) - - def register_options_provider(self, provider, own_group=True): - """register an options provider""" - assert provider.priority <= 0, "provider's priority can't be >= 0" - for i in range(len(self.options_providers)): - if provider.priority > self.options_providers[i].priority: - self.options_providers.insert(i, provider) - break - else: - self.options_providers.append(provider) - non_group_spec_options = [option for option in provider.options - if 'group' not in option[1]] - groups = getattr(provider, 'option_groups', ()) - if own_group and non_group_spec_options: - self.add_option_group(provider.name.upper(), provider.__doc__, - non_group_spec_options, provider) - else: - for opt, optdict in non_group_spec_options: - self.add_optik_option(provider, self.cmdline_parser, opt, optdict) - for gname, gdoc in groups: - gname = gname.upper() - goptions = [option for option in provider.options - if option[1].get('group', '').upper() == gname] - self.add_option_group(gname, gdoc, goptions, provider) - - def add_option_group(self, group_name, doc, options, provider): - """add an option group including the listed options - """ - assert options - # add option group to the command line parser - if group_name in self._mygroups: - group = self._mygroups[group_name] - else: - group = optik_ext.OptionGroup(self.cmdline_parser, - title=group_name.capitalize()) - self.cmdline_parser.add_option_group(group) - group.level = provider.level - self._mygroups[group_name] = group - # add section to the config file - if group_name != "DEFAULT": - self.cfgfile_parser.add_section(group_name) - # add provider's specific options - for opt, optdict in options: - self.add_optik_option(provider, group, opt, optdict) - - def add_optik_option(self, provider, optikcontainer, opt, optdict): - if 'inputlevel' in optdict: - warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,' - ' use "level"' % opt, DeprecationWarning) - optdict['level'] = optdict.pop('inputlevel') - args, optdict = self.optik_option(provider, opt, optdict) - option = optikcontainer.add_option(*args, **optdict) - self._all_options[opt] = provider - self._maxlevel = max(self._maxlevel, option.level or 0) - - def optik_option(self, provider, opt, optdict): - """get our personal option definition and return a suitable form for - use with optik/optparse - """ - optdict = copy(optdict) - others = {} - if 'action' in optdict: - self._nocallback_options[provider] = opt - else: - optdict['action'] = 'callback' - optdict['callback'] = self.cb_set_provider_option - # default is handled here and *must not* be given to optik if you - # want the whole machinery to work - if 'default' in optdict: - if ('help' in optdict - and optdict.get('default') is not None - and not optdict['action'] in ('store_true', 'store_false')): - optdict['help'] += ' [current: %default]' - del optdict['default'] - args = ['--' + str(opt)] - if 'short' in optdict: - self._short_options[optdict['short']] = opt - args.append('-' 
+ optdict['short']) - del optdict['short'] - # cleanup option definition dict before giving it to optik - for key in list(optdict.keys()): - if not key in self._optik_option_attrs: - optdict.pop(key) - return args, optdict - - def cb_set_provider_option(self, option, opt, value, parser): - """optik callback for option setting""" - if opt.startswith('--'): - # remove -- on long option - opt = opt[2:] - else: - # short option, get its long equivalent - opt = self._short_options[opt[1:]] - # trick since we can't set action='store_true' on options - if value is None: - value = 1 - self.global_set_option(opt, value) - - def global_set_option(self, opt, value): - """set option on the correct option provider""" - self._all_options[opt].set_option(opt, value) - - def generate_config(self, stream=None, skipsections=(), encoding=None): - """write a configuration file according to the current configuration - into the given stream or stdout - """ - options_by_section = {} - sections = [] - for provider in self.options_providers: - for section, options in provider.options_by_section(): - if section is None: - section = provider.name - if section in skipsections: - continue - options = [(n, d, v) for (n, d, v) in options - if d.get('type') is not None] - if not options: - continue - if not section in sections: - sections.append(section) - alloptions = options_by_section.setdefault(section, []) - alloptions += options - stream = stream or sys.stdout - encoding = _get_encoding(encoding, stream) - printed = False - for section in sections: - if printed: - print('\n', file=stream) - format_section(stream, section.upper(), options_by_section[section], - encoding) - printed = True - - def generate_manpage(self, pkginfo, section=1, stream=None): - """write a man page for the current configuration into the given - stream or stdout - """ - self._monkeypatch_expand_default() - try: - optik_ext.generate_manpage(self.cmdline_parser, pkginfo, - section, stream=stream or sys.stdout, - level=self._maxlevel) - finally: - self._unmonkeypatch_expand_default() - - # initialization methods ################################################## - - def load_provider_defaults(self): - """initialize configuration using default values""" - for provider in self.options_providers: - provider.load_defaults() - - def load_file_configuration(self, config_file=None): - """load the configuration from file""" - self.read_config_file(config_file) - self.load_config_file() - - def read_config_file(self, config_file=None): - """read the configuration file but do not load it (i.e. dispatching - values to each options provider) - """ - helplevel = 1 - while helplevel <= self._maxlevel: - opt = '-'.join(['long'] * helplevel) + '-help' - if opt in self._all_options: - break # already processed - def helpfunc(option, opt, val, p, level=helplevel): - print(self.help(level)) - sys.exit(0) - helpmsg = '%s verbose help.' 
% ' '.join(['more'] * helplevel) - optdict = {'action' : 'callback', 'callback' : helpfunc, - 'help' : helpmsg} - provider = self.options_providers[0] - self.add_optik_option(provider, self.cmdline_parser, opt, optdict) - provider.options += ( (opt, optdict), ) - helplevel += 1 - if config_file is None: - config_file = self.config_file - if config_file is not None: - config_file = expanduser(config_file) - if config_file and exists(config_file): - parser = self.cfgfile_parser - parser.read([config_file]) - # normalize sections'title - for sect, values in list(parser._sections.items()): - if not sect.isupper() and values: - parser._sections[sect.upper()] = values - elif not self.quiet: - msg = 'No config file found, using default configuration' - print(msg, file=sys.stderr) - return - - def input_config(self, onlysection=None, inputlevel=0, stream=None): - """interactively get configuration values by asking to the user and generate - a configuration file - """ - if onlysection is not None: - onlysection = onlysection.upper() - for provider in self.options_providers: - for section, option, optdict in provider.all_options(): - if onlysection is not None and section != onlysection: - continue - if not 'type' in optdict: - # ignore action without type (callback, store_true...) - continue - provider.input_option(option, optdict, inputlevel) - # now we can generate the configuration file - if stream is not None: - self.generate_config(stream) - - def load_config_file(self): - """dispatch values previously read from a configuration file to each - options provider) - """ - parser = self.cfgfile_parser - for section in parser.sections(): - for option, value in parser.items(section): - try: - self.global_set_option(option, value) - except (KeyError, OptionError): - # TODO handle here undeclared options appearing in the config file - continue - - def load_configuration(self, **kwargs): - """override configuration according to given parameters - """ - for opt, opt_value in kwargs.items(): - opt = opt.replace('_', '-') - provider = self._all_options[opt] - provider.set_option(opt, opt_value) - - def load_command_line_configuration(self, args=None): - """override configuration according to command line parameters - - return additional arguments - """ - self._monkeypatch_expand_default() - try: - if args is None: - args = sys.argv[1:] - else: - args = list(args) - (options, args) = self.cmdline_parser.parse_args(args=args) - for provider in self._nocallback_options.keys(): - config = provider.config - for attr in config.__dict__.keys(): - value = getattr(options, attr, None) - if value is None: - continue - setattr(config, attr, value) - return args - finally: - self._unmonkeypatch_expand_default() - - - # help methods ############################################################ - - def add_help_section(self, title, description, level=0): - """add a dummy option section for help purpose """ - group = optik_ext.OptionGroup(self.cmdline_parser, - title=title.capitalize(), - description=description) - group.level = level - self._maxlevel = max(self._maxlevel, level) - self.cmdline_parser.add_option_group(group) - - def _monkeypatch_expand_default(self): - # monkey patch optik_ext to deal with our default values - try: - self.__expand_default_backup = optik_ext.HelpFormatter.expand_default - optik_ext.HelpFormatter.expand_default = expand_default - except AttributeError: - # python < 2.4: nothing to be done - pass - def _unmonkeypatch_expand_default(self): - # remove monkey patch - if 
hasattr(optik_ext.HelpFormatter, 'expand_default'): - # unpatch optik_ext to avoid side effects - optik_ext.HelpFormatter.expand_default = self.__expand_default_backup - - def help(self, level=0): - """return the usage string for available options """ - self.cmdline_parser.formatter.output_level = level - self._monkeypatch_expand_default() - try: - return self.cmdline_parser.format_help() - finally: - self._unmonkeypatch_expand_default() - - -class Method(object): - """used to ease late binding of default method (so you can define options - on the class using default methods on the configuration instance) - """ - def __init__(self, methname): - self.method = methname - self._inst = None - - def bind(self, instance): - """bind the method to its instance""" - if self._inst is None: - self._inst = instance - - def __call__(self, *args, **kwargs): - assert self._inst, 'unbound method' - return getattr(self._inst, self.method)(*args, **kwargs) - -# Options Provider ############################################################# - -class OptionsProviderMixIn(object): - """Mixin to provide options to an OptionsManager""" - - # those attributes should be overridden - priority = -1 - name = 'default' - options = () - level = 0 - - def __init__(self): - self.config = optik_ext.Values() - for option in self.options: - try: - option, optdict = option - except ValueError: - raise Exception('Bad option: %r' % option) - if isinstance(optdict.get('default'), Method): - optdict['default'].bind(self) - elif isinstance(optdict.get('callback'), Method): - optdict['callback'].bind(self) - self.load_defaults() - - def load_defaults(self): - """initialize the provider using default values""" - for opt, optdict in self.options: - action = optdict.get('action') - if action != 'callback': - # callback action have no default - default = self.option_default(opt, optdict) - if default is REQUIRED: - continue - self.set_option(opt, default, action, optdict) - - def option_default(self, opt, optdict=None): - """return the default value for an option""" - if optdict is None: - optdict = self.get_option_def(opt) - default = optdict.get('default') - if callable(default): - default = default() - return default - - def option_attrname(self, opt, optdict=None): - """get the config attribute corresponding to opt - """ - if optdict is None: - optdict = self.get_option_def(opt) - return optdict.get('dest', opt.replace('-', '_')) - option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname) - - def option_value(self, opt): - """get the current value for the given option""" - return getattr(self.config, self.option_attrname(opt), None) - - def set_option(self, opt, value, action=None, optdict=None): - """method called to set an option (registered in the options list) - """ - if optdict is None: - optdict = self.get_option_def(opt) - if value is not None: - value = _validate(value, optdict, opt) - if action is None: - action = optdict.get('action', 'store') - if optdict.get('type') == 'named': # XXX need specific handling - optname = self.option_attrname(opt, optdict) - currentvalue = getattr(self.config, optname, None) - if currentvalue: - currentvalue.update(value) - value = currentvalue - if action == 'store': - setattr(self.config, self.option_attrname(opt, optdict), value) - elif action in ('store_true', 'count'): - setattr(self.config, self.option_attrname(opt, optdict), 0) - elif action == 'store_false': - setattr(self.config, self.option_attrname(opt, optdict), 1) - 
elif action == 'append': - opt = self.option_attrname(opt, optdict) - _list = getattr(self.config, opt, None) - if _list is None: - if isinstance(value, (list, tuple)): - _list = value - elif value is not None: - _list = [] - _list.append(value) - setattr(self.config, opt, _list) - elif isinstance(_list, tuple): - setattr(self.config, opt, _list + (value,)) - else: - _list.append(value) - elif action == 'callback': - optdict['callback'](None, opt, value, None) - else: - raise UnsupportedAction(action) - - def input_option(self, option, optdict, inputlevel=99): - default = self.option_default(option, optdict) - if default is REQUIRED: - defaultstr = '(required): ' - elif optdict.get('level', 0) > inputlevel: - return - elif optdict['type'] == 'password' or default is None: - defaultstr = ': ' - else: - defaultstr = '(default: %s): ' % format_option_value(optdict, default) - print(':%s:' % option) - print(optdict.get('help') or option) - inputfunc = INPUT_FUNCTIONS[optdict['type']] - value = inputfunc(optdict, defaultstr) - while default is REQUIRED and not value: - print('please specify a value') - value = inputfunc(optdict, '%s: ' % option) - if value is None and default is not None: - value = default - self.set_option(option, value, optdict=optdict) - - def get_option_def(self, opt): - """return the dictionary defining an option given it's name""" - assert self.options - for option in self.options: - if option[0] == opt: - return option[1] - raise OptionError('no such option %s in section %r' - % (opt, self.name), opt) - - - def all_options(self): - """return an iterator on available options for this provider - option are actually described by a 3-uple: - (section, option name, option dictionary) - """ - for section, options in self.options_by_section(): - if section is None: - if self.name is None: - continue - section = self.name.upper() - for option, optiondict, value in options: - yield section, option, optiondict - - def options_by_section(self): - """return an iterator on options grouped by section - - (section, [list of (optname, optdict, optvalue)]) - """ - sections = {} - for optname, optdict in self.options: - sections.setdefault(optdict.get('group'), []).append( - (optname, optdict, self.option_value(optname))) - if None in sections: - yield None, sections.pop(None) - for section, options in sorted(sections.items()): - yield section.upper(), options - - def options_and_values(self, options=None): - if options is None: - options = self.options - for optname, optdict in options: - yield (optname, optdict, self.option_value(optname)) - -# configuration ################################################################ - -class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): - """basic mixin for simple configurations which don't need the - manager / providers model - """ - def __init__(self, *args, **kwargs): - if not args: - kwargs.setdefault('usage', '') - kwargs.setdefault('quiet', 1) - OptionsManagerMixIn.__init__(self, *args, **kwargs) - OptionsProviderMixIn.__init__(self) - if not getattr(self, 'option_groups', None): - self.option_groups = [] - for option, optdict in self.options: - try: - gdef = (optdict['group'].upper(), '') - except KeyError: - continue - if not gdef in self.option_groups: - self.option_groups.append(gdef) - self.register_options_provider(self, own_group=False) - - def register_options(self, options): - """add some options to the configuration""" - options_by_group = {} - for optname, optdict in options: - 
options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict)) - for group, group_options in options_by_group.items(): - self.add_option_group(group, None, group_options, self) - self.options += tuple(options) - - def load_defaults(self): - OptionsProviderMixIn.load_defaults(self) - - def __iter__(self): - return iter(self.config.__dict__.items()) - - def __getitem__(self, key): - try: - return getattr(self.config, self.option_attrname(key)) - except (optik_ext.OptionValueError, AttributeError): - raise KeyError(key) - - def __setitem__(self, key, value): - self.set_option(key, value) - - def get(self, key, default=None): - try: - return getattr(self.config, self.option_attrname(key)) - except (OptionError, AttributeError): - return default - - -class Configuration(ConfigurationMixIn): - """class for simple configurations which don't need the - manager / providers model and prefer delegation to inheritance - - configuration values are accessible through a dict like interface - """ - - def __init__(self, config_file=None, options=None, name=None, - usage=None, doc=None, version=None): - if options is not None: - self.options = options - if name is not None: - self.name = name - if doc is not None: - self.__doc__ = doc - super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version) - - -class OptionsManager2ConfigurationAdapter(object): - """Adapt an option manager to behave like a - `logilab.common.configuration.Configuration` instance - """ - def __init__(self, provider): - self.config = provider - - def __getattr__(self, key): - return getattr(self.config, key) - - def __getitem__(self, key): - provider = self.config._all_options[key] - try: - return getattr(provider.config, provider.option_attrname(key)) - except AttributeError: - raise KeyError(key) - - def __setitem__(self, key, value): - self.config.global_set_option(self.config.option_attrname(key), value) - - def get(self, key, default=None): - provider = self.config._all_options[key] - try: - return getattr(provider.config, provider.option_attrname(key)) - except AttributeError: - return default - -# other functions ############################################################## - -def read_old_config(newconfig, changes, configfile): - """initialize newconfig from a deprecated configuration file - - possible changes: - * ('renamed', oldname, newname) - * ('moved', option, oldgroup, newgroup) - * ('typechanged', option, oldtype, newvalue) - """ - # build an index of changes - changesindex = {} - for action in changes: - if action[0] == 'moved': - option, oldgroup, newgroup = action[1:] - changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup)) - continue - if action[0] == 'renamed': - oldname, newname = action[1:] - changesindex.setdefault(newname, []).append((action[0], oldname)) - continue - if action[0] == 'typechanged': - option, oldtype, newvalue = action[1:] - changesindex.setdefault(option, []).append((action[0], oldtype, newvalue)) - continue - if action[0] in ('added', 'removed'): - continue # nothing to do here - raise Exception('unknown change %s' % action[0]) - # build a config object able to read the old config - options = [] - for optname, optdef in newconfig.options: - for action in changesindex.pop(optname, ()): - if action[0] == 'moved': - oldgroup, newgroup = action[1:] - optdef = optdef.copy() - optdef['group'] = oldgroup - elif action[0] == 'renamed': - optname = action[1] - elif action[0] == 'typechanged': - oldtype = action[1] - 
optdef = optdef.copy() - optdef['type'] = oldtype - options.append((optname, optdef)) - if changesindex: - raise Exception('unapplied changes: %s' % changesindex) - oldconfig = Configuration(options=options, name=newconfig.name) - # read the old config - oldconfig.load_file_configuration(configfile) - # apply values reverting changes - changes.reverse() - done = set() - for action in changes: - if action[0] == 'renamed': - oldname, newname = action[1:] - newconfig[newname] = oldconfig[oldname] - done.add(newname) - elif action[0] == 'typechanged': - optname, oldtype, newvalue = action[1:] - newconfig[optname] = newvalue - done.add(optname) - for optname, optdef in newconfig.options: - if optdef.get('type') and not optname in done: - newconfig.set_option(optname, oldconfig[optname], optdict=optdef) - - -def merge_options(options, optgroup=None): - """preprocess a list of options and remove duplicates, returning a new list - (tuple actually) of options. - - Options dictionaries are copied to avoid later side-effect. Also, if - `otpgroup` argument is specified, ensure all options are in the given group. - """ - alloptions = {} - options = list(options) - for i in range(len(options)-1, -1, -1): - optname, optdict = options[i] - if optname in alloptions: - options.pop(i) - alloptions[optname].update(optdict) - else: - optdict = optdict.copy() - options[i] = (optname, optdict) - alloptions[optname] = optdict - if optgroup is not None: - alloptions[optname]['group'] = optgroup - return tuple(options) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py b/pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py deleted file mode 100644 index 40319a43..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/daemon.py +++ /dev/null @@ -1,101 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""A daemonize function (for Unices)""" - -__docformat__ = "restructuredtext en" - -import os -import errno -import signal -import sys -import time -import warnings - -from six.moves import range - -def setugid(user): - """Change process user and group ID - - Argument is a numeric user id or a user name""" - try: - from pwd import getpwuid - passwd = getpwuid(int(user)) - except ValueError: - from pwd import getpwnam - passwd = getpwnam(user) - - if hasattr(os, 'initgroups'): # python >= 2.7 - os.initgroups(passwd.pw_name, passwd.pw_gid) - else: - import ctypes - if ctypes.CDLL(None).initgroups(passwd.pw_name, passwd.pw_gid) < 0: - err = ctypes.c_int.in_dll(ctypes.pythonapi,"errno").value - raise OSError(err, os.strerror(err), 'initgroups') - os.setgid(passwd.pw_gid) - os.setuid(passwd.pw_uid) - os.environ['HOME'] = passwd.pw_dir - - -def daemonize(pidfile=None, uid=None, umask=0o77): - """daemonize a Unix process. 
Set paranoid umask by default. - - Return 1 in the original process, 2 in the first fork, and None for the - second fork (eg daemon process). - """ - # http://www.faqs.org/faqs/unix-faq/programmer/faq/ - # - # fork so the parent can exit - if os.fork(): # launch child and... - return 1 - # disconnect from tty and create a new session - os.setsid() - # fork again so the parent, (the session group leader), can exit. - # as a non-session group leader, we can never regain a controlling - # terminal. - if os.fork(): # launch child again. - return 2 - # move to the root to avoit mount pb - os.chdir('/') - # redirect standard descriptors - null = os.open('/dev/null', os.O_RDWR) - for i in range(3): - try: - os.dup2(null, i) - except OSError as e: - if e.errno != errno.EBADF: - raise - os.close(null) - # filter warnings - warnings.filterwarnings('ignore') - # write pid in a file - if pidfile: - # ensure the directory where the pid-file should be set exists (for - # instance /var/run/cubicweb may be deleted on computer restart) - piddir = os.path.dirname(pidfile) - if not os.path.exists(piddir): - os.makedirs(piddir) - f = file(pidfile, 'w') - f.write(str(os.getpid())) - f.close() - # set umask if specified - if umask is not None: - os.umask(umask) - # change process uid - if uid: - setugid(uid) - return None diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/date.py b/pymode/libs/logilab-common-1.4.1/logilab/common/date.py deleted file mode 100644 index 1d13a770..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/date.py +++ /dev/null @@ -1,335 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Date manipulation helper functions.""" -from __future__ import division - -__docformat__ = "restructuredtext en" - -import math -import re -import sys -from locale import getlocale, LC_TIME -from datetime import date, time, datetime, timedelta -from time import strptime as time_strptime -from calendar import monthrange, timegm - -from six.moves import range - -try: - from mx.DateTime import RelativeDateTime, Date, DateTimeType -except ImportError: - endOfMonth = None - DateTimeType = datetime -else: - endOfMonth = RelativeDateTime(months=1, day=-1) - -# NOTE: should we implement a compatibility layer between date representations -# as we have in lgc.db ? 
- -FRENCH_FIXED_HOLIDAYS = { - 'jour_an': '%s-01-01', - 'fete_travail': '%s-05-01', - 'armistice1945': '%s-05-08', - 'fete_nat': '%s-07-14', - 'assomption': '%s-08-15', - 'toussaint': '%s-11-01', - 'armistice1918': '%s-11-11', - 'noel': '%s-12-25', - } - -FRENCH_MOBILE_HOLIDAYS = { - 'paques2004': '2004-04-12', - 'ascension2004': '2004-05-20', - 'pentecote2004': '2004-05-31', - - 'paques2005': '2005-03-28', - 'ascension2005': '2005-05-05', - 'pentecote2005': '2005-05-16', - - 'paques2006': '2006-04-17', - 'ascension2006': '2006-05-25', - 'pentecote2006': '2006-06-05', - - 'paques2007': '2007-04-09', - 'ascension2007': '2007-05-17', - 'pentecote2007': '2007-05-28', - - 'paques2008': '2008-03-24', - 'ascension2008': '2008-05-01', - 'pentecote2008': '2008-05-12', - - 'paques2009': '2009-04-13', - 'ascension2009': '2009-05-21', - 'pentecote2009': '2009-06-01', - - 'paques2010': '2010-04-05', - 'ascension2010': '2010-05-13', - 'pentecote2010': '2010-05-24', - - 'paques2011': '2011-04-25', - 'ascension2011': '2011-06-02', - 'pentecote2011': '2011-06-13', - - 'paques2012': '2012-04-09', - 'ascension2012': '2012-05-17', - 'pentecote2012': '2012-05-28', - } - -# XXX this implementation cries for multimethod dispatching - -def get_step(dateobj, nbdays=1): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(dateobj, date): - return ONEDAY * nbdays - return nbdays # mx.DateTime is ok with integers - -def datefactory(year, month, day, sampledate): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(sampledate, datetime): - return datetime(year, month, day) - if isinstance(sampledate, date): - return date(year, month, day) - return Date(year, month, day) - -def weekday(dateobj): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(dateobj, date): - return dateobj.weekday() - return dateobj.day_of_week - -def str2date(datestr, sampledate): - # NOTE: datetime.strptime is not an option until we drop py2.4 compat - year, month, day = [int(chunk) for chunk in datestr.split('-')] - return datefactory(year, month, day, sampledate) - -def days_between(start, end): - if isinstance(start, date): - delta = end - start - # datetime.timedelta.days is always an integer (floored) - if delta.seconds: - return delta.days + 1 - return delta.days - else: - return int(math.ceil((end - start).days)) - -def get_national_holidays(begin, end): - """return french national days off between begin and end""" - begin = datefactory(begin.year, begin.month, begin.day, begin) - end = datefactory(end.year, end.month, end.day, end) - holidays = [str2date(datestr, begin) - for datestr in FRENCH_MOBILE_HOLIDAYS.values()] - for year in range(begin.year, end.year+1): - for datestr in FRENCH_FIXED_HOLIDAYS.values(): - date = str2date(datestr % year, begin) - if date not in holidays: - holidays.append(date) - return [day for day in holidays if begin <= day < end] - -def add_days_worked(start, days): - """adds date but try to only take days worked into account""" - step = get_step(start) - weeks, plus = divmod(days, 5) - end = start + ((weeks * 7) + plus) * step - if weekday(end) >= 5: # saturday or sunday - end += (2 * step) - end += len([x for x in get_national_holidays(start, end + step) - if weekday(x) < 5]) * step - if weekday(end) >= 5: # saturday or sunday - end += (2 * step) - return end - -def nb_open_days(start, end): - assert start <= end - step = get_step(start) - days = days_between(start, end) - weeks, plus = divmod(days, 7) - if 
weekday(start) > weekday(end): - plus -= 2 - elif weekday(end) == 6: - plus -= 1 - open_days = weeks * 5 + plus - nb_week_holidays = len([x for x in get_national_holidays(start, end+step) - if weekday(x) < 5 and x < end]) - open_days -= nb_week_holidays - if open_days < 0: - return 0 - return open_days - -def date_range(begin, end, incday=None, incmonth=None): - """yields each date between begin and end - - :param begin: the start date - :param end: the end date - :param incr: the step to use to iterate over dates. Default is - one day. - :param include: None (means no exclusion) or a function taking a - date as parameter, and returning True if the date - should be included. - - When using mx datetime, you should *NOT* use incmonth argument, use instead - oneDay, oneHour, oneMinute, oneSecond, oneWeek or endOfMonth (to enumerate - months) as `incday` argument - """ - assert not (incday and incmonth) - begin = todate(begin) - end = todate(end) - if incmonth: - while begin < end: - yield begin - begin = next_month(begin, incmonth) - else: - incr = get_step(begin, incday or 1) - while begin < end: - yield begin - begin += incr - -# makes py datetime usable ##################################################### - -ONEDAY = timedelta(days=1) -ONEWEEK = timedelta(days=7) - -try: - strptime = datetime.strptime -except AttributeError: # py < 2.5 - from time import strptime as time_strptime - def strptime(value, format): - return datetime(*time_strptime(value, format)[:6]) - -def strptime_time(value, format='%H:%M'): - return time(*time_strptime(value, format)[3:6]) - -def todate(somedate): - """return a date from a date (leaving unchanged) or a datetime""" - if isinstance(somedate, datetime): - return date(somedate.year, somedate.month, somedate.day) - assert isinstance(somedate, (date, DateTimeType)), repr(somedate) - return somedate - -def totime(somedate): - """return a time from a time (leaving unchanged), date or datetime""" - # XXX mx compat - if not isinstance(somedate, time): - return time(somedate.hour, somedate.minute, somedate.second) - assert isinstance(somedate, (time)), repr(somedate) - return somedate - -def todatetime(somedate): - """return a date from a date (leaving unchanged) or a datetime""" - # take care, datetime is a subclass of date - if isinstance(somedate, datetime): - return somedate - assert isinstance(somedate, (date, DateTimeType)), repr(somedate) - return datetime(somedate.year, somedate.month, somedate.day) - -def datetime2ticks(somedate): - return timegm(somedate.timetuple()) * 1000 + int(getattr(somedate, 'microsecond', 0) / 1000) - -def ticks2datetime(ticks): - miliseconds, microseconds = divmod(ticks, 1000) - try: - return datetime.fromtimestamp(miliseconds) - except (ValueError, OverflowError): - epoch = datetime.fromtimestamp(0) - nb_days, seconds = divmod(int(miliseconds), 86400) - delta = timedelta(nb_days, seconds=seconds, microseconds=microseconds) - try: - return epoch + delta - except (ValueError, OverflowError): - raise - -def days_in_month(somedate): - return monthrange(somedate.year, somedate.month)[1] - -def days_in_year(somedate): - feb = date(somedate.year, 2, 1) - if days_in_month(feb) == 29: - return 366 - else: - return 365 - -def previous_month(somedate, nbmonth=1): - while nbmonth: - somedate = first_day(somedate) - ONEDAY - nbmonth -= 1 - return somedate - -def next_month(somedate, nbmonth=1): - while nbmonth: - somedate = last_day(somedate) + ONEDAY - nbmonth -= 1 - return somedate - -def first_day(somedate): - return date(somedate.year, 
somedate.month, 1) - -def last_day(somedate): - return date(somedate.year, somedate.month, days_in_month(somedate)) - -def ustrftime(somedate, fmt='%Y-%m-%d'): - """like strftime, but returns a unicode string instead of an encoded - string which may be problematic with localized date. - """ - if sys.version_info >= (3, 3): - # datetime.date.strftime() supports dates since year 1 in Python >=3.3. - return somedate.strftime(fmt) - else: - try: - if sys.version_info < (3, 0): - encoding = getlocale(LC_TIME)[1] or 'ascii' - return unicode(somedate.strftime(str(fmt)), encoding) - else: - return somedate.strftime(fmt) - except ValueError: - if somedate.year >= 1900: - raise - # datetime is not happy with dates before 1900 - # we try to work around this, assuming a simple - # format string - fields = {'Y': somedate.year, - 'm': somedate.month, - 'd': somedate.day, - } - if isinstance(somedate, datetime): - fields.update({'H': somedate.hour, - 'M': somedate.minute, - 'S': somedate.second}) - fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt) - return unicode(fmt) % fields - -def utcdatetime(dt): - if dt.tzinfo is None: - return dt - return (dt.replace(tzinfo=None) - dt.utcoffset()) - -def utctime(dt): - if dt.tzinfo is None: - return dt - return (dt + dt.utcoffset() + dt.dst()).replace(tzinfo=None) - -def datetime_to_seconds(date): - """return the number of seconds since the begining of the day for that date - """ - return date.second+60*date.minute + 3600*date.hour - -def timedelta_to_days(delta): - """return the time delta as a number of seconds""" - return delta.days + delta.seconds / (3600*24) - -def timedelta_to_seconds(delta): - """return the time delta as a fraction of days""" - return delta.days*(3600*24) + delta.seconds diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py b/pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py deleted file mode 100644 index 1f540a18..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/debugger.py +++ /dev/null @@ -1,214 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Customized version of pdb's default debugger. 
- -- sets up a history file -- uses ipython if available to colorize lines of code -- overrides list command to search for current block instead - of using 5 lines of context - - - - -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -try: - import readline -except ImportError: - readline = None -import os -import os.path as osp -import sys -from pdb import Pdb -import inspect - -from logilab.common.compat import StringIO - -try: - from IPython import PyColorize -except ImportError: - def colorize(source, *args): - """fallback colorize function""" - return source - def colorize_source(source, *args): - return source -else: - def colorize(source, start_lineno, curlineno): - """colorize and annotate source with linenos - (as in pdb's list command) - """ - parser = PyColorize.Parser() - output = StringIO() - parser.format(source, output) - annotated = [] - for index, line in enumerate(output.getvalue().splitlines()): - lineno = index + start_lineno - if lineno == curlineno: - annotated.append('%4s\t->\t%s' % (lineno, line)) - else: - annotated.append('%4s\t\t%s' % (lineno, line)) - return '\n'.join(annotated) - - def colorize_source(source): - """colorize given source""" - parser = PyColorize.Parser() - output = StringIO() - parser.format(source, output) - return output.getvalue() - - -def getsource(obj): - """Return the text of the source code for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a single string. An - IOError is raised if the source code cannot be retrieved.""" - lines, lnum = inspect.getsourcelines(obj) - return ''.join(lines), lnum - - -################################################################ -class Debugger(Pdb): - """custom debugger - - - sets up a history file - - uses ipython if available to colorize lines of code - - overrides list command to search for current block instead - of using 5 lines of context - """ - def __init__(self, tcbk=None): - Pdb.__init__(self) - self.reset() - if tcbk: - while tcbk.tb_next is not None: - tcbk = tcbk.tb_next - self._tcbk = tcbk - self._histfile = os.path.expanduser("~/.pdbhist") - - def setup_history_file(self): - """if readline is available, read pdb history file - """ - if readline is not None: - try: - # XXX try..except shouldn't be necessary - # read_history_file() can accept None - readline.read_history_file(self._histfile) - except IOError: - pass - - def start(self): - """starts the interactive mode""" - self.interaction(self._tcbk.tb_frame, self._tcbk) - - def setup(self, frame, tcbk): - """setup hook: set up history file""" - self.setup_history_file() - Pdb.setup(self, frame, tcbk) - - def set_quit(self): - """quit hook: save commands in the history file""" - if readline is not None: - readline.write_history_file(self._histfile) - Pdb.set_quit(self) - - def complete_p(self, text, line, begin_idx, end_idx): - """provide variable names completion for the ``p`` command""" - namespace = dict(self.curframe.f_globals) - namespace.update(self.curframe.f_locals) - if '.' in text: - return self.attr_matches(text, namespace) - return [varname for varname in namespace if varname.startswith(text)] - - - def attr_matches(self, text, namespace): - """implementation coming from rlcompleter.Completer.attr_matches - Compute matches when text contains a dot. 
- - Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes - (as revealed by dir()) are used as possible completions. (For class - instances, class members are also considered.) - - WARNING: this can still invoke arbitrary C code, if an object - with a __getattr__ hook is evaluated. - - """ - import re - m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text) - if not m: - return - expr, attr = m.group(1, 3) - object = eval(expr, namespace) - words = dir(object) - if hasattr(object, '__class__'): - words.append('__class__') - words = words + self.get_class_members(object.__class__) - matches = [] - n = len(attr) - for word in words: - if word[:n] == attr and word != "__builtins__": - matches.append("%s.%s" % (expr, word)) - return matches - - def get_class_members(self, klass): - """implementation coming from rlcompleter.get_class_members""" - ret = dir(klass) - if hasattr(klass, '__bases__'): - for base in klass.__bases__: - ret = ret + self.get_class_members(base) - return ret - - ## specific / overridden commands - def do_list(self, arg): - """overrides default list command to display the surrounding block - instead of 5 lines of context - """ - self.lastcmd = 'list' - if not arg: - try: - source, start_lineno = getsource(self.curframe) - print(colorize(''.join(source), start_lineno, - self.curframe.f_lineno)) - except KeyboardInterrupt: - pass - except IOError: - Pdb.do_list(self, arg) - else: - Pdb.do_list(self, arg) - do_l = do_list - - def do_open(self, arg): - """opens source file corresponding to the current stack level""" - filename = self.curframe.f_code.co_filename - lineno = self.curframe.f_lineno - cmd = 'emacsclient --no-wait +%s %s' % (lineno, filename) - os.system(cmd) - - do_o = do_open - -def pm(): - """use our custom debugger""" - dbg = Debugger(sys.last_traceback) - dbg.start() - -def set_trace(): - Debugger().set_trace(sys._getframe().f_back) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py b/pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py deleted file mode 100644 index beafa202..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/decorators.py +++ /dev/null @@ -1,281 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" A few useful function/method decorators. 
""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import types -from time import clock, time -from inspect import isgeneratorfunction, getargspec - -from logilab.common.compat import method_type - -# XXX rewrite so we can use the decorator syntax when keyarg has to be specified - -class cached_decorator(object): - def __init__(self, cacheattr=None, keyarg=None): - self.cacheattr = cacheattr - self.keyarg = keyarg - def __call__(self, callableobj=None): - assert not isgeneratorfunction(callableobj), \ - 'cannot cache generator function: %s' % callableobj - if len(getargspec(callableobj).args) == 1 or self.keyarg == 0: - cache = _SingleValueCache(callableobj, self.cacheattr) - elif self.keyarg: - cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr) - else: - cache = _MultiValuesCache(callableobj, self.cacheattr) - return cache.closure() - -class _SingleValueCache(object): - def __init__(self, callableobj, cacheattr=None): - self.callable = callableobj - if cacheattr is None: - self.cacheattr = '_%s_cache_' % callableobj.__name__ - else: - assert cacheattr != callableobj.__name__ - self.cacheattr = cacheattr - - def __call__(__me, self, *args): - try: - return self.__dict__[__me.cacheattr] - except KeyError: - value = __me.callable(self, *args) - setattr(self, __me.cacheattr, value) - return value - - def closure(self): - def wrapped(*args, **kwargs): - return self.__call__(*args, **kwargs) - wrapped.cache_obj = self - try: - wrapped.__doc__ = self.callable.__doc__ - wrapped.__name__ = self.callable.__name__ - except: - pass - return wrapped - - def clear(self, holder): - holder.__dict__.pop(self.cacheattr, None) - - -class _MultiValuesCache(_SingleValueCache): - def _get_cache(self, holder): - try: - _cache = holder.__dict__[self.cacheattr] - except KeyError: - _cache = {} - setattr(holder, self.cacheattr, _cache) - return _cache - - def __call__(__me, self, *args, **kwargs): - _cache = __me._get_cache(self) - try: - return _cache[args] - except KeyError: - _cache[args] = __me.callable(self, *args) - return _cache[args] - -class _MultiValuesKeyArgCache(_MultiValuesCache): - def __init__(self, callableobj, keyarg, cacheattr=None): - super(_MultiValuesKeyArgCache, self).__init__(callableobj, cacheattr) - self.keyarg = keyarg - - def __call__(__me, self, *args, **kwargs): - _cache = __me._get_cache(self) - key = args[__me.keyarg-1] - try: - return _cache[key] - except KeyError: - _cache[key] = __me.callable(self, *args, **kwargs) - return _cache[key] - - -def cached(callableobj=None, keyarg=None, **kwargs): - """Simple decorator to cache result of method call.""" - kwargs['keyarg'] = keyarg - decorator = cached_decorator(**kwargs) - if callableobj is None: - return decorator - else: - return decorator(callableobj) - - -class cachedproperty(object): - """ Provides a cached property equivalent to the stacking of - @cached and @property, but more efficient. - - After first usage, the becomes part of the object's - __dict__. Doing: - - del obj. empties the cache. - - Idea taken from the pyramid_ framework and the mercurial_ project. - - .. _pyramid: http://pypi.python.org/pypi/pyramid - .. 
_mercurial: http://pypi.python.org/pypi/Mercurial - """ - __slots__ = ('wrapped',) - - def __init__(self, wrapped): - try: - wrapped.__name__ - except AttributeError: - raise TypeError('%s must have a __name__ attribute' % - wrapped) - self.wrapped = wrapped - - @property - def __doc__(self): - doc = getattr(self.wrapped, '__doc__', None) - return ('%s' - % ('\n%s' % doc if doc else '')) - - def __get__(self, inst, objtype=None): - if inst is None: - return self - val = self.wrapped(inst) - setattr(inst, self.wrapped.__name__, val) - return val - - -def get_cache_impl(obj, funcname): - cls = obj.__class__ - member = getattr(cls, funcname) - if isinstance(member, property): - member = member.fget - return member.cache_obj - -def clear_cache(obj, funcname): - """Clear a cache handled by the :func:`cached` decorator. If 'x' class has - @cached on its method `foo`, type - - >>> clear_cache(x, 'foo') - - to purge this method's cache on the instance. - """ - get_cache_impl(obj, funcname).clear(obj) - -def copy_cache(obj, funcname, cacheobj): - """Copy cache for from cacheobj to obj.""" - cacheattr = get_cache_impl(obj, funcname).cacheattr - try: - setattr(obj, cacheattr, cacheobj.__dict__[cacheattr]) - except KeyError: - pass - - -class wproperty(object): - """Simple descriptor expecting to take a modifier function as first argument - and looking for a _ to retrieve the attribute. - """ - def __init__(self, setfunc): - self.setfunc = setfunc - self.attrname = '_%s' % setfunc.__name__ - - def __set__(self, obj, value): - self.setfunc(obj, value) - - def __get__(self, obj, cls): - assert obj is not None - return getattr(obj, self.attrname) - - -class classproperty(object): - """this is a simple property-like class but for class attributes. - """ - def __init__(self, get): - self.get = get - def __get__(self, inst, cls): - return self.get(cls) - - -class iclassmethod(object): - '''Descriptor for method which should be available as class method if called - on the class or instance method if called on an instance. - ''' - def __init__(self, func): - self.func = func - def __get__(self, instance, objtype): - if instance is None: - return method_type(self.func, objtype, objtype.__class__) - return method_type(self.func, instance, objtype) - def __set__(self, instance, value): - raise AttributeError("can't set attribute") - - -def timed(f): - def wrap(*args, **kwargs): - t = time() - c = clock() - res = f(*args, **kwargs) - print('%s clock: %.9f / time: %.9f' % (f.__name__, - clock() - c, time() - t)) - return res - return wrap - - -def locked(acquire, release): - """Decorator taking two methods to acquire/release a lock as argument, - returning a decorator function which will call the inner method after - having called acquire(self) et will call release(self) afterwards. - """ - def decorator(f): - def wrapper(self, *args, **kwargs): - acquire(self) - try: - return f(self, *args, **kwargs) - finally: - release(self) - return wrapper - return decorator - - -def monkeypatch(klass, methodname=None): - """Decorator extending class with the decorated callable. This is basically - a syntactic sugar vs class assignment. - - >>> class A: - ... pass - >>> @monkeypatch(A) - ... def meth(self): - ... return 12 - ... - >>> a = A() - >>> a.meth() - 12 - >>> @monkeypatch(A, 'foo') - ... def meth(self): - ... return 12 - ... 
- >>> a.foo() - 12 - """ - def decorator(func): - try: - name = methodname or func.__name__ - except AttributeError: - raise AttributeError('%s has no __name__ attribute: ' - 'you should provide an explicit `methodname`' - % func) - setattr(klass, name, func) - return func - return decorator diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py b/pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py deleted file mode 100644 index 1c81b638..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/deprecation.py +++ /dev/null @@ -1,189 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Deprecation utilities.""" - -__docformat__ = "restructuredtext en" - -import sys -from warnings import warn - -from logilab.common.changelog import Version - - -class DeprecationWrapper(object): - """proxy to print a warning on access to any attribute of the wrapped object - """ - def __init__(self, proxied, msg=None): - self._proxied = proxied - self._msg = msg - - def __getattr__(self, attr): - warn(self._msg, DeprecationWarning, stacklevel=2) - return getattr(self._proxied, attr) - - def __setattr__(self, attr, value): - if attr in ('_proxied', '_msg'): - self.__dict__[attr] = value - else: - warn(self._msg, DeprecationWarning, stacklevel=2) - setattr(self._proxied, attr, value) - - -class DeprecationManager(object): - """Manage the deprecation message handling. Messages are dropped for - versions more recent than the 'compatible' version. Example:: - - deprecator = deprecation.DeprecationManager("module_name") - deprecator.compatibility('1.3') - - deprecator.warn('1.2', "message.") - - @deprecator.deprecated('1.2', 'Message') - def any_func(): - pass - - class AnyClass(object): - __metaclass__ = deprecator.class_deprecated('1.2') - """ - def __init__(self, module_name=None): - """ - """ - self.module_name = module_name - self.compatible_version = None - - def compatibility(self, compatible_version): - """Set the compatible version. - """ - self.compatible_version = Version(compatible_version) - - def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=None): - """Display a deprecation message only if the version is older than the - compatible version. 
- """ - def decorator(func): - message = reason or 'The function "%s" is deprecated' - if '%s' in message: - message %= func.__name__ - def wrapped(*args, **kwargs): - self.warn(version, message, stacklevel+1) - return func(*args, **kwargs) - return wrapped - return decorator - - def class_deprecated(self, version=None): - class metaclass(type): - """metaclass to print a warning on instantiation of a deprecated class""" - - def __call__(cls, *args, **kwargs): - msg = getattr(cls, "__deprecation_warning__", - "%(cls)s is deprecated") % {'cls': cls.__name__} - self.warn(version, msg, stacklevel=3) - return type.__call__(cls, *args, **kwargs) - return metaclass - - def moved(self, version, modpath, objname): - """use to tell that a callable has been moved to a new module. - - It returns a callable wrapper, so that when its called a warning is printed - telling where the object can be found, import is done (and not before) and - the actual object is called. - - NOTE: the usage is somewhat limited on classes since it will fail if the - wrapper is use in a class ancestors list, use the `class_moved` function - instead (which has no lazy import feature though). - """ - def callnew(*args, **kwargs): - from logilab.common.modutils import load_module_from_name - message = "object %s has been moved to module %s" % (objname, modpath) - self.warn(version, message) - m = load_module_from_name(modpath) - return getattr(m, objname)(*args, **kwargs) - return callnew - - def class_renamed(self, version, old_name, new_class, message=None): - clsdict = {} - if message is None: - message = '%s is deprecated, use %s' % (old_name, new_class.__name__) - clsdict['__deprecation_warning__'] = message - try: - # new-style class - return self.class_deprecated(version)(old_name, (new_class,), clsdict) - except (NameError, TypeError): - # old-style class - warn = self.warn - class DeprecatedClass(new_class): - """FIXME: There might be a better way to handle old/new-style class - """ - def __init__(self, *args, **kwargs): - warn(version, message, stacklevel=3) - new_class.__init__(self, *args, **kwargs) - return DeprecatedClass - - def class_moved(self, version, new_class, old_name=None, message=None): - """nice wrapper around class_renamed when a class has been moved into - another module - """ - if old_name is None: - old_name = new_class.__name__ - if message is None: - message = 'class %s is now available as %s.%s' % ( - old_name, new_class.__module__, new_class.__name__) - return self.class_renamed(version, old_name, new_class, message) - - def warn(self, version=None, reason="", stacklevel=2): - """Display a deprecation message only if the version is older than the - compatible version. 
- """ - if (self.compatible_version is None - or version is None - or Version(version) < self.compatible_version): - if self.module_name and version: - reason = '[%s %s] %s' % (self.module_name, version, reason) - elif self.module_name: - reason = '[%s] %s' % (self.module_name, reason) - elif version: - reason = '[%s] %s' % (version, reason) - warn(reason, DeprecationWarning, stacklevel=stacklevel) - -_defaultdeprecator = DeprecationManager() - -def deprecated(reason=None, stacklevel=2, name=None, doc=None): - return _defaultdeprecator.deprecated(None, reason, stacklevel, name, doc) - -class_deprecated = _defaultdeprecator.class_deprecated() - -def moved(modpath, objname): - return _defaultdeprecator.moved(None, modpath, objname) -moved.__doc__ = _defaultdeprecator.moved.__doc__ - -def class_renamed(old_name, new_class, message=None): - """automatically creates a class which fires a DeprecationWarning - when instantiated. - - >>> Set = class_renamed('Set', set, 'Set is now replaced by set') - >>> s = Set() - sample.py:57: DeprecationWarning: Set is now replaced by set - s = Set() - >>> - """ - return _defaultdeprecator.class_renamed(None, old_name, new_class, message) - -def class_moved(new_class, old_name=None, message=None): - return _defaultdeprecator.class_moved(None, new_class, old_name, message) -class_moved.__doc__ = _defaultdeprecator.class_moved.__doc__ - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py deleted file mode 100644 index 93439d3b..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/fileutils.py +++ /dev/null @@ -1,397 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""File and file-path manipulation utilities. - -:group path manipulation: first_level_directory, relative_path, is_binary,\ -get_by_ext, remove_dead_links -:group file manipulation: norm_read, norm_open, lines, stream_lines, lines,\ -write_open_mode, ensure_fs_mode, export -:sort: path manipulation, file manipulation -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import io -import sys -import shutil -import mimetypes -from os.path import isabs, isdir, islink, split, exists, normpath, join -from os.path import abspath -from os import sep, mkdir, remove, listdir, stat, chmod, walk -from stat import ST_MODE, S_IWRITE - -from logilab.common import STD_BLACKLIST as BASE_BLACKLIST, IGNORED_EXTENSIONS -from logilab.common.shellutils import find -from logilab.common.deprecation import deprecated -from logilab.common.compat import FileIO - -def first_level_directory(path): - """Return the first level directory of a path. 
- - >>> first_level_directory('home/syt/work') - 'home' - >>> first_level_directory('/home/syt/work') - '/' - >>> first_level_directory('work') - 'work' - >>> - - :type path: str - :param path: the path for which we want the first level directory - - :rtype: str - :return: the first level directory appearing in `path` - """ - head, tail = split(path) - while head and tail: - head, tail = split(head) - if tail: - return tail - # path was absolute, head is the fs root - return head - -def abspath_listdir(path): - """Lists path's content using absolute paths.""" - path = abspath(path) - return [join(path, filename) for filename in listdir(path)] - - -def is_binary(filename): - """Return true if filename may be a binary file, according to it's - extension. - - :type filename: str - :param filename: the name of the file - - :rtype: bool - :return: - true if the file is a binary file (actually if it's mime type - isn't beginning by text/) - """ - try: - return not mimetypes.guess_type(filename)[0].startswith('text') - except AttributeError: - return 1 - - -def write_open_mode(filename): - """Return the write mode that should used to open file. - - :type filename: str - :param filename: the name of the file - - :rtype: str - :return: the mode that should be use to open the file ('w' or 'wb') - """ - if is_binary(filename): - return 'wb' - return 'w' - - -def ensure_fs_mode(filepath, desired_mode=S_IWRITE): - """Check that the given file has the given mode(s) set, else try to - set it. - - :type filepath: str - :param filepath: path of the file - - :type desired_mode: int - :param desired_mode: - ORed flags describing the desired mode. Use constants from the - `stat` module for file permission's modes - """ - mode = stat(filepath)[ST_MODE] - if not mode & desired_mode: - chmod(filepath, mode | desired_mode) - - -# XXX (syt) unused? kill? -class ProtectedFile(FileIO): - """A special file-object class that automatically does a 'chmod +w' when - needed. - - XXX: for now, the way it is done allows 'normal file-objects' to be - created during the ProtectedFile object lifetime. - One way to circumvent this would be to chmod / unchmod on each - write operation. - - One other way would be to : - - - catch the IOError in the __init__ - - - if IOError, then create a StringIO object - - - each write operation writes in this StringIO object - - - on close()/del(), write/append the StringIO content to the file and - do the chmod only once - """ - def __init__(self, filepath, mode): - self.original_mode = stat(filepath)[ST_MODE] - self.mode_changed = False - if mode in ('w', 'a', 'wb', 'ab'): - if not self.original_mode & S_IWRITE: - chmod(filepath, self.original_mode | S_IWRITE) - self.mode_changed = True - FileIO.__init__(self, filepath, mode) - - def _restore_mode(self): - """restores the original mode if needed""" - if self.mode_changed: - chmod(self.name, self.original_mode) - # Don't re-chmod in case of several restore - self.mode_changed = False - - def close(self): - """restore mode before closing""" - self._restore_mode() - FileIO.close(self) - - def __del__(self): - if not self.closed: - self.close() - - -class UnresolvableError(Exception): - """Exception raised by relative path when it's unable to compute relative - path between two paths. - """ - -def relative_path(from_file, to_file): - """Try to get a relative path from `from_file` to `to_file` - (path will be absolute if to_file is an absolute file). This function - is useful to create link in `from_file` to `to_file`. 
This typical use - case is used in this function description. - - If both files are relative, they're expected to be relative to the same - directory. - - >>> relative_path( from_file='toto/index.html', to_file='index.html') - '../index.html' - >>> relative_path( from_file='index.html', to_file='toto/index.html') - 'toto/index.html' - >>> relative_path( from_file='tutu/index.html', to_file='toto/index.html') - '../toto/index.html' - >>> relative_path( from_file='toto/index.html', to_file='/index.html') - '/index.html' - >>> relative_path( from_file='/toto/index.html', to_file='/index.html') - '../index.html' - >>> relative_path( from_file='/toto/index.html', to_file='/toto/summary.html') - 'summary.html' - >>> relative_path( from_file='index.html', to_file='index.html') - '' - >>> relative_path( from_file='/index.html', to_file='toto/index.html') - Traceback (most recent call last): - File "", line 1, in ? - File "", line 37, in relative_path - UnresolvableError - >>> relative_path( from_file='/index.html', to_file='/index.html') - '' - >>> - - :type from_file: str - :param from_file: source file (where links will be inserted) - - :type to_file: str - :param to_file: target file (on which links point) - - :raise UnresolvableError: if it has been unable to guess a correct path - - :rtype: str - :return: the relative path of `to_file` from `from_file` - """ - from_file = normpath(from_file) - to_file = normpath(to_file) - if from_file == to_file: - return '' - if isabs(to_file): - if not isabs(from_file): - return to_file - elif isabs(from_file): - raise UnresolvableError() - from_parts = from_file.split(sep) - to_parts = to_file.split(sep) - idem = 1 - result = [] - while len(from_parts) > 1: - dirname = from_parts.pop(0) - if idem and len(to_parts) > 1 and dirname == to_parts[0]: - to_parts.pop(0) - else: - idem = 0 - result.append('..') - result += to_parts - return sep.join(result) - - -def norm_read(path): - """Return the content of the file with normalized line feeds. - - :type path: str - :param path: path to the file to read - - :rtype: str - :return: the content of the file with normalized line feeds - """ - return open(path, 'U').read() -norm_read = deprecated("use \"open(path, 'U').read()\"")(norm_read) - -def norm_open(path): - """Return a stream for a file with content with normalized line feeds. - - :type path: str - :param path: path to the file to open - - :rtype: file or StringIO - :return: the opened file with normalized line feeds - """ - return open(path, 'U') -norm_open = deprecated("use \"open(path, 'U')\"")(norm_open) - -def lines(path, comments=None): - """Return a list of non empty lines in the file located at `path`. - - :type path: str - :param path: path to the file - - :type comments: str or None - :param comments: - optional string which can be used to comment a line in the file - (i.e. lines starting with this string won't be returned) - - :rtype: list - :return: - a list of stripped line in the file, without empty and commented - lines - - :warning: at some point this function will probably return an iterator - """ - with io.open(path) as stream: - return stream_lines(stream, comments) - - -def stream_lines(stream, comments=None): - """Return a list of non empty lines in the given `stream`. - - :type stream: object implementing 'xreadlines' or 'readlines' - :param stream: file like object - - :type comments: str or None - :param comments: - optional string which can be used to comment a line in the file - (i.e. 
lines starting with this string won't be returned) - - :rtype: list - :return: - a list of stripped line in the file, without empty and commented - lines - - :warning: at some point this function will probably return an iterator - """ - try: - readlines = stream.xreadlines - except AttributeError: - readlines = stream.readlines - result = [] - for line in readlines(): - line = line.strip() - if line and (comments is None or not line.startswith(comments)): - result.append(line) - return result - - -def export(from_dir, to_dir, - blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, - verbose=0): - """Make a mirror of `from_dir` in `to_dir`, omitting directories and - files listed in the black list or ending with one of the given - extensions. - - :type from_dir: str - :param from_dir: directory to export - - :type to_dir: str - :param to_dir: destination directory - - :type blacklist: list or tuple - :param blacklist: - list of files or directories to ignore, default to the content of - `BASE_BLACKLIST` - - :type ignore_ext: list or tuple - :param ignore_ext: - list of extensions to ignore, default to the content of - `IGNORED_EXTENSIONS` - - :type verbose: bool - :param verbose: - flag indicating whether information about exported files should be - printed to stderr, default to False - """ - try: - mkdir(to_dir) - except OSError: - pass # FIXME we should use "exists" if the point is about existing dir - # else (permission problems?) shouldn't return / raise ? - for directory, dirnames, filenames in walk(from_dir): - for norecurs in blacklist: - try: - dirnames.remove(norecurs) - except ValueError: - continue - for dirname in dirnames: - src = join(directory, dirname) - dest = to_dir + src[len(from_dir):] - if isdir(src): - if not exists(dest): - mkdir(dest) - for filename in filenames: - # don't include binary files - # endswith does not accept tuple in 2.4 - if any([filename.endswith(ext) for ext in ignore_ext]): - continue - src = join(directory, filename) - dest = to_dir + src[len(from_dir):] - if verbose: - print(src, '->', dest, file=sys.stderr) - if exists(dest): - remove(dest) - shutil.copy2(src, dest) - - -def remove_dead_links(directory, verbose=0): - """Recursively traverse directory and remove all dead links. - - :type directory: str - :param directory: directory to cleanup - - :type verbose: bool - :param verbose: - flag indicating whether information about deleted links should be - printed to stderr, default to False - """ - for dirpath, dirname, filenames in walk(directory): - for filename in dirnames + filenames: - src = join(dirpath, filename) - if islink(src) and not exists(src): - if verbose: - print('remove dead link', src) - remove(src) - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/graph.py b/pymode/libs/logilab-common-1.4.1/logilab/common/graph.py deleted file mode 100644 index cef1c984..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/graph.py +++ /dev/null @@ -1,282 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Graph manipulation utilities. - -(dot generation adapted from pypy/translator/tool/make_dot.py) -""" - -__docformat__ = "restructuredtext en" - -__metaclass__ = type - -import os.path as osp -import os -import sys -import tempfile -import codecs -import errno - -def escape(value): - """Make usable in a dot file.""" - lines = [line.replace('"', '\\"') for line in value.split('\n')] - data = '\\l'.join(lines) - return '\\n' + data - -def target_info_from_filename(filename): - """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png').""" - basename = osp.basename(filename) - storedir = osp.dirname(osp.abspath(filename)) - target = filename.split('.')[-1] - return storedir, basename, target - - -class DotBackend: - """Dot File backend.""" - def __init__(self, graphname, rankdir=None, size=None, ratio=None, - charset='utf-8', renderer='dot', additionnal_param={}): - self.graphname = graphname - self.renderer = renderer - self.lines = [] - self._source = None - self.emit("digraph %s {" % normalize_node_id(graphname)) - if rankdir: - self.emit('rankdir=%s' % rankdir) - if ratio: - self.emit('ratio=%s' % ratio) - if size: - self.emit('size="%s"' % size) - if charset: - assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \ - 'unsupported charset %s' % charset - self.emit('charset="%s"' % charset) - for param in sorted(additionnal_param.items()): - self.emit('='.join(param)) - - def get_source(self): - """returns self._source""" - if self._source is None: - self.emit("}\n") - self._source = '\n'.join(self.lines) - del self.lines - return self._source - - source = property(get_source) - - def generate(self, outputfile=None, dotfile=None, mapfile=None): - """Generates a graph file. 
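[Editorial note] DotBackend above is, at heart, a buffer of DOT statements that is later joined into graphviz source. A self-contained sketch of that pattern follows; the class and node names are made up for illustration, and no graphviz binary is invoked.

class TinyDot:
    """Collect DOT statements, then join them into graphviz source."""
    def __init__(self, name):
        self.lines = ['digraph "%s" {' % name]

    def emit_edge(self, src, dst, **props):
        attrs = ', '.join('%s="%s"' % (key, value)
                          for key, value in sorted(props.items()))
        self.lines.append('"%s" -> "%s" [%s];' % (src, dst, attrs))

    @property
    def source(self):
        return '\n'.join(self.lines + ['}'])

g = TinyDot("deps")
g.emit_edge("pymode", "pylint", style="dashed")
print(g.source)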
- - :param outputfile: filename and path [defaults to graphname.png] - :param dotfile: filename and path [defaults to graphname.dot] - - :rtype: str - :return: a path to the generated file - """ - import subprocess # introduced in py 2.4 - name = self.graphname - if not dotfile: - # if 'outputfile' is a dot file use it as 'dotfile' - if outputfile and outputfile.endswith(".dot"): - dotfile = outputfile - else: - dotfile = '%s.dot' % name - if outputfile is not None: - storedir, basename, target = target_info_from_filename(outputfile) - if target != "dot": - pdot, dot_sourcepath = tempfile.mkstemp(".dot", name) - os.close(pdot) - else: - dot_sourcepath = osp.join(storedir, dotfile) - else: - target = 'png' - pdot, dot_sourcepath = tempfile.mkstemp(".dot", name) - ppng, outputfile = tempfile.mkstemp(".png", name) - os.close(pdot) - os.close(ppng) - pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8') - pdot.write(self.source) - pdot.close() - if target != 'dot': - if sys.platform == 'win32': - use_shell = True - else: - use_shell = False - try: - if mapfile: - subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile], - shell=use_shell) - else: - subprocess.call([self.renderer, '-T', target, - dot_sourcepath, '-o', outputfile], - shell=use_shell) - except OSError as e: - if e.errno == errno.ENOENT: - e.strerror = 'File not found: {0}'.format(self.renderer) - raise - os.unlink(dot_sourcepath) - return outputfile - - def emit(self, line): - """Adds to final output.""" - self.lines.append(line) - - def emit_edge(self, name1, name2, **props): - """emit an edge from to . - edge properties: see http://www.graphviz.org/doc/info/attrs.html - """ - attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] - n_from, n_to = normalize_node_id(name1), normalize_node_id(name2) - self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))) ) - - def emit_node(self, name, **props): - """emit a node with given properties. - node properties: see http://www.graphviz.org/doc/info/attrs.html - """ - attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] - self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs)))) - -def normalize_node_id(nid): - """Returns a suitable DOT node id for `nid`.""" - return '"%s"' % nid - -class GraphGenerator: - def __init__(self, backend): - # the backend is responsible to output the graph in a particular format - self.backend = backend - - # XXX doesn't like space in outpufile / mapfile - def generate(self, visitor, propshdlr, outputfile=None, mapfile=None): - # the visitor - # the property handler is used to get node and edge properties - # according to the graph and to the backend - self.propshdlr = propshdlr - for nodeid, node in visitor.nodes(): - props = propshdlr.node_properties(node) - self.backend.emit_node(nodeid, **props) - for subjnode, objnode, edge in visitor.edges(): - props = propshdlr.edge_properties(edge, subjnode, objnode) - self.backend.emit_edge(subjnode, objnode, **props) - return self.backend.generate(outputfile=outputfile, mapfile=mapfile) - - -class UnorderableGraph(Exception): - pass - -def ordered_nodes(graph): - """takes a dependency graph dict as arguments and return an ordered tuple of - nodes starting with nodes without dependencies and up to the outermost node. - - If there is some cycle in the graph, :exc:`UnorderableGraph` will be raised. - - Also the given graph dict will be emptied. 
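[Editorial note] Readers looking for a maintained replacement for the dependency ordering described above can use graphlib from the standard library (Python 3.9+). This is a stdlib alternative, not the removed implementation; the graph below is an example.

from graphlib import CycleError, TopologicalSorter

# Edges map a node to the nodes it depends on, the same dict-of-dependencies
# shape the removed ordered_nodes() documents.
graph = {"app": {"lib", "utils"}, "lib": {"utils"}, "utils": set()}

try:
    print(tuple(TopologicalSorter(graph).static_order()))
    # dependencies come first, e.g. ('utils', 'lib', 'app')
except CycleError as exc:
    print("cycle in graph:", exc)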
- """ - # check graph consistency - cycles = get_cycles(graph) - if cycles: - cycles = '\n'.join([' -> '.join(cycle) for cycle in cycles]) - raise UnorderableGraph('cycles in graph: %s' % cycles) - vertices = set(graph) - to_vertices = set() - for edges in graph.values(): - to_vertices |= set(edges) - missing_vertices = to_vertices - vertices - if missing_vertices: - raise UnorderableGraph('missing vertices: %s' % ', '.join(missing_vertices)) - # order vertices - order = [] - order_set = set() - old_len = None - while graph: - if old_len == len(graph): - raise UnorderableGraph('unknown problem with %s' % graph) - old_len = len(graph) - deps_ok = [] - for node, node_deps in graph.items(): - for dep in node_deps: - if dep not in order_set: - break - else: - deps_ok.append(node) - order.append(deps_ok) - order_set |= set(deps_ok) - for node in deps_ok: - del graph[node] - result = [] - for grp in reversed(order): - result.extend(sorted(grp)) - return tuple(result) - - -def get_cycles(graph_dict, vertices=None): - '''given a dictionary representing an ordered graph (i.e. key are vertices - and values is a list of destination vertices representing edges), return a - list of detected cycles - ''' - if not graph_dict: - return () - result = [] - if vertices is None: - vertices = graph_dict.keys() - for vertice in vertices: - _get_cycles(graph_dict, [], set(), result, vertice) - return result - -def _get_cycles(graph_dict, path, visited, result, vertice): - """recursive function doing the real work for get_cycles""" - if vertice in path: - cycle = [vertice] - for node in path[::-1]: - if node == vertice: - break - cycle.insert(0, node) - # make a canonical representation - start_from = min(cycle) - index = cycle.index(start_from) - cycle = cycle[index:] + cycle[0:index] - # append it to result if not already in - if not cycle in result: - result.append(cycle) - return - path.append(vertice) - try: - for node in graph_dict[vertice]: - # don't check already visited nodes again - if node not in visited: - _get_cycles(graph_dict, path, visited, result, node) - visited.add(node) - except KeyError: - pass - path.pop() - -def has_path(graph_dict, fromnode, tonode, path=None): - """generic function taking a simple graph definition as a dictionary, with - node has key associated to a list of nodes directly reachable from it. - - Return None if no path exists to go from `fromnode` to `tonode`, else the - first path found (as a list including the destination node at last) - """ - if path is None: - path = [] - elif fromnode in path: - return None - path.append(fromnode) - for destnode in graph_dict[fromnode]: - if destnode == tonode or has_path(graph_dict, destnode, tonode, path): - return path[1:] + [tonode] - path.pop() - return None - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/interface.py b/pymode/libs/logilab-common-1.4.1/logilab/common/interface.py deleted file mode 100644 index 3ea4ab7e..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/interface.py +++ /dev/null @@ -1,71 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Bases class for interfaces to provide 'light' interface handling. - - TODO: - _ implements a check method which check that an object implements the - interface - _ Attribute objects - - This module requires at least python 2.2 -""" -__docformat__ = "restructuredtext en" - - -class Interface(object): - """Base class for interfaces.""" - def is_implemented_by(cls, instance): - return implements(instance, cls) - is_implemented_by = classmethod(is_implemented_by) - - -def implements(obj, interface): - """Return true if the give object (maybe an instance or class) implements - the interface. - """ - kimplements = getattr(obj, '__implements__', ()) - if not isinstance(kimplements, (list, tuple)): - kimplements = (kimplements,) - for implementedinterface in kimplements: - if issubclass(implementedinterface, interface): - return True - return False - - -def extend(klass, interface, _recurs=False): - """Add interface to klass'__implements__ if not already implemented in. - - If klass is subclassed, ensure subclasses __implements__ it as well. - - NOTE: klass should be e new class. - """ - if not implements(klass, interface): - try: - kimplements = klass.__implements__ - kimplementsklass = type(kimplements) - kimplements = list(kimplements) - except AttributeError: - kimplementsklass = tuple - kimplements = [] - kimplements.append(interface) - klass.__implements__ = kimplementsklass(kimplements) - for subklass in klass.__subclasses__(): - extend(subklass, interface, _recurs=True) - elif _recurs: - for subklass in klass.__subclasses__(): - extend(subklass, interface, _recurs=True) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py deleted file mode 100644 index 3b6a580a..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/logging_ext.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
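[Editorial note] The light Interface/implements helpers removed above predate abstract base classes; on Python 3 the abc module from the standard library covers the same need. The checker classes below are invented for illustration.

from abc import ABC, abstractmethod

class Checker(ABC):
    @abstractmethod
    def run(self, path):
        ...

class PEP8Checker(Checker):
    def run(self, path):
        return "checked %s" % path

print(issubclass(PEP8Checker, Checker))    # True
print(isinstance(PEP8Checker(), Checker))  # True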
-"""Extends the logging module from the standard library.""" - -__docformat__ = "restructuredtext en" - -import os -import sys -import logging - -from six import string_types - -from logilab.common.textutils import colorize_ansi - - -def set_log_methods(cls, logger): - """bind standard logger's methods as methods on the class""" - cls.__logger = logger - for attr in ('debug', 'info', 'warning', 'error', 'critical', 'exception'): - setattr(cls, attr, getattr(logger, attr)) - - -def xxx_cyan(record): - if 'XXX' in record.message: - return 'cyan' - -class ColorFormatter(logging.Formatter): - """ - A color Formatter for the logging standard module. - - By default, colorize CRITICAL and ERROR in red, WARNING in orange, INFO in - green and DEBUG in yellow. - - self.colors is customizable via the 'color' constructor argument (dictionary). - - self.colorfilters is a list of functions that get the LogRecord - and return a color name or None. - """ - - def __init__(self, fmt=None, datefmt=None, colors=None): - logging.Formatter.__init__(self, fmt, datefmt) - self.colorfilters = [] - self.colors = {'CRITICAL': 'red', - 'ERROR': 'red', - 'WARNING': 'magenta', - 'INFO': 'green', - 'DEBUG': 'yellow', - } - if colors is not None: - assert isinstance(colors, dict) - self.colors.update(colors) - - def format(self, record): - msg = logging.Formatter.format(self, record) - if record.levelname in self.colors: - color = self.colors[record.levelname] - return colorize_ansi(msg, color) - else: - for cf in self.colorfilters: - color = cf(record) - if color: - return colorize_ansi(msg, color) - return msg - -def set_color_formatter(logger=None, **kw): - """ - Install a color formatter on the 'logger'. If not given, it will - defaults to the default logger. - - Any additional keyword will be passed as-is to the ColorFormatter - constructor. 
- """ - if logger is None: - logger = logging.getLogger() - if not logger.handlers: - logging.basicConfig() - format_msg = logger.handlers[0].formatter._fmt - fmt = ColorFormatter(format_msg, **kw) - fmt.colorfilters.append(xxx_cyan) - logger.handlers[0].setFormatter(fmt) - - -LOG_FORMAT = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s' -LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S' - -def get_handler(debug=False, syslog=False, logfile=None, rotation_parameters=None): - """get an apropriate handler according to given parameters""" - if os.environ.get('APYCOT_ROOT'): - handler = logging.StreamHandler(sys.stdout) - if debug: - handler = logging.StreamHandler() - elif logfile is None: - if syslog: - from logging import handlers - handler = handlers.SysLogHandler() - else: - handler = logging.StreamHandler() - else: - try: - if rotation_parameters is None: - if os.name == 'posix' and sys.version_info >= (2, 6): - from logging.handlers import WatchedFileHandler - handler = WatchedFileHandler(logfile) - else: - handler = logging.FileHandler(logfile) - else: - from logging.handlers import TimedRotatingFileHandler - handler = TimedRotatingFileHandler( - logfile, **rotation_parameters) - except IOError: - handler = logging.StreamHandler() - return handler - -def get_threshold(debug=False, logthreshold=None): - if logthreshold is None: - if debug: - logthreshold = logging.DEBUG - else: - logthreshold = logging.ERROR - elif isinstance(logthreshold, string_types): - logthreshold = getattr(logging, THRESHOLD_MAP.get(logthreshold, - logthreshold)) - return logthreshold - -def _colorable_terminal(): - isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty() - if not isatty: - return False - if os.name == 'nt': - try: - from colorama import init as init_win32_colors - except ImportError: - return False - init_win32_colors() - return True - -def get_formatter(logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT): - if _colorable_terminal(): - fmt = ColorFormatter(logformat, logdateformat) - def col_fact(record): - if 'XXX' in record.message: - return 'cyan' - if 'kick' in record.message: - return 'red' - fmt.colorfilters.append(col_fact) - else: - fmt = logging.Formatter(logformat, logdateformat) - return fmt - -def init_log(debug=False, syslog=False, logthreshold=None, logfile=None, - logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT, fmt=None, - rotation_parameters=None, handler=None): - """init the log service""" - logger = logging.getLogger() - if handler is None: - handler = get_handler(debug, syslog, logfile, rotation_parameters) - # only addHandler and removeHandler method while I would like a setHandler - # method, so do it this way :$ - logger.handlers = [handler] - logthreshold = get_threshold(debug, logthreshold) - logger.setLevel(logthreshold) - if fmt is None: - if debug: - fmt = get_formatter(logformat=logformat, logdateformat=logdateformat) - else: - fmt = logging.Formatter(logformat, logdateformat) - handler.setFormatter(fmt) - return handler - -# map logilab.common.logger thresholds to logging thresholds -THRESHOLD_MAP = {'LOG_DEBUG': 'DEBUG', - 'LOG_INFO': 'INFO', - 'LOG_NOTICE': 'INFO', - 'LOG_WARN': 'WARNING', - 'LOG_WARNING': 'WARNING', - 'LOG_ERR': 'ERROR', - 'LOG_ERROR': 'ERROR', - 'LOG_CRIT': 'CRITICAL', - } diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py deleted file mode 100644 index 030cfa3b..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/modutils.py +++ /dev/null 
@@ -1,753 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Python modules manipulation utility functions. - -:type PY_SOURCE_EXTS: tuple(str) -:var PY_SOURCE_EXTS: list of possible python source file extension - -:type STD_LIB_DIR: str -:var STD_LIB_DIR: directory where standard modules are located - -:type BUILTIN_MODULES: dict -:var BUILTIN_MODULES: dictionary with builtin module names as key -""" - -__docformat__ = "restructuredtext en" - -import sys -import os -from os.path import (splitext, join, abspath, isdir, dirname, exists, - basename, expanduser, normcase, realpath) -from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY -from distutils.sysconfig import get_config_var, get_python_lib, get_python_version -from distutils.errors import DistutilsPlatformError - -from six import PY3 -from six.moves import map, range - -try: - import zipimport -except ImportError: - zipimport = None - -ZIPFILE = object() - -from logilab.common import STD_BLACKLIST, _handle_blacklist -from logilab.common.deprecation import deprecated - -# Notes about STD_LIB_DIR -# Consider arch-specific installation for STD_LIB_DIR definition -# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on -# -# :see: `Problems with /usr/lib64 builds `_ -# :see: `FHS `_ -if sys.platform.startswith('win'): - PY_SOURCE_EXTS = ('py', 'pyw') - PY_COMPILED_EXTS = ('dll', 'pyd') -else: - PY_SOURCE_EXTS = ('py',) - PY_COMPILED_EXTS = ('so',) - -try: - STD_LIB_DIR = get_python_lib(standard_lib=True) -# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to -# non-valid path, see https://bugs.pypy.org/issue1164 -except DistutilsPlatformError: - STD_LIB_DIR = '//' - -EXT_LIB_DIR = get_python_lib() - -BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True) - - -class NoSourceFile(Exception): - """exception raised when we are not able to get a python - source file for a precompiled file - """ - -class LazyObject(object): - def __init__(self, module, obj): - self.module = module - self.obj = obj - self._imported = None - - def _getobj(self): - if self._imported is None: - self._imported = getattr(load_module_from_name(self.module), - self.obj) - return self._imported - - def __getattribute__(self, attr): - try: - return super(LazyObject, self).__getattribute__(attr) - except AttributeError as ex: - return getattr(self._getobj(), attr) - - def __call__(self, *args, **kwargs): - return self._getobj()(*args, **kwargs) - - -def load_module_from_name(dotted_name, path=None, use_sys=True): - """Load a Python module from its name. 
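[Editorial note] The module loading helpers in modutils are built on the long-deprecated imp module. Since this series targets Python 3 only, importlib provides the equivalent entry points; the module names below are examples, not python-mode internals.

import importlib
import importlib.util

# Dotted-name import, the importlib counterpart of load_module_from_name().
mod = importlib.import_module("json.decoder")
print(mod.JSONDecoder)

# Locate a module without importing it, replacing imp.find_module().
spec = importlib.util.find_spec("email.mime")
print(spec.origin if spec else None)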
- - :type dotted_name: str - :param dotted_name: python name of a module or package - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - :type use_sys: bool - :param use_sys: - boolean indicating whether the sys.modules dictionary should be - used or not - - - :raise ImportError: if the module or package is not found - - :rtype: module - :return: the loaded module - """ - return load_module_from_modpath(dotted_name.split('.'), path, use_sys) - - -def load_module_from_modpath(parts, path=None, use_sys=True): - """Load a python module from its splitted name. - - :type parts: list(str) or tuple(str) - :param parts: - python name of a module or package splitted on '.' - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - :type use_sys: bool - :param use_sys: - boolean indicating whether the sys.modules dictionary should be used or not - - :raise ImportError: if the module or package is not found - - :rtype: module - :return: the loaded module - """ - if use_sys: - try: - return sys.modules['.'.join(parts)] - except KeyError: - pass - modpath = [] - prevmodule = None - for part in parts: - modpath.append(part) - curname = '.'.join(modpath) - module = None - if len(modpath) != len(parts): - # even with use_sys=False, should try to get outer packages from sys.modules - module = sys.modules.get(curname) - elif use_sys: - # because it may have been indirectly loaded through a parent - module = sys.modules.get(curname) - if module is None: - mp_file, mp_filename, mp_desc = find_module(part, path) - try: - module = load_module(curname, mp_file, mp_filename, mp_desc) - finally: - if mp_file is not None: - mp_file.close() - if prevmodule: - setattr(prevmodule, part, module) - _file = getattr(module, '__file__', '') - prevmodule = module - if not _file and _is_namespace(curname): - continue - if not _file and len(modpath) != len(parts): - raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]) ) - path = [dirname( _file )] - return module - - -def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None): - """Load a Python module from it's path. 
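[Editorial note] For loading a module from an explicit file path, the importlib counterpart of load_module_from_file() is spec_from_file_location. The sketch below writes a throwaway module first so it can run on its own; the file and module names are invented.

import importlib.util
import pathlib
import tempfile

path = pathlib.Path(tempfile.mkdtemp()) / "example_mod.py"
path.write_text("ANSWER = 42\n")

spec = importlib.util.spec_from_file_location("example_mod", path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
print(module.ANSWER)  # 42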
- - :type filepath: str - :param filepath: path to the python module or package - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - :type use_sys: bool - :param use_sys: - boolean indicating whether the sys.modules dictionary should be - used or not - - - :raise ImportError: if the module or package is not found - - :rtype: module - :return: the loaded module - """ - modpath = modpath_from_file(filepath, extrapath) - return load_module_from_modpath(modpath, path, use_sys) - - -def _check_init(path, mod_path): - """check there are some __init__.py all along the way""" - modpath = [] - for part in mod_path: - modpath.append(part) - path = join(path, part) - if not _is_namespace('.'.join(modpath)) and not _has_init(path): - return False - return True - - -def _canonicalize_path(path): - return realpath(expanduser(path)) - - -def _path_from_filename(filename): - if PY3: - return filename - else: - if filename.endswith(".pyc"): - return filename[:-1] - return filename - - -@deprecated('you should avoid using modpath_from_file()') -def modpath_from_file(filename, extrapath=None): - """DEPRECATED: doens't play well with symlinks and sys.meta_path - - Given a file path return the corresponding splitted module's name - (i.e name of a module or package splitted on '.') - - :type filename: str - :param filename: file's path for which we want the module's name - - :type extrapath: dict - :param extrapath: - optional extra search path, with path as key and package name for the path - as value. This is usually useful to handle package splitted in multiple - directories using __path__ trick. - - - :raise ImportError: - if the corresponding module's name has not been found - - :rtype: list(str) - :return: the corresponding splitted module's name - """ - filename = _path_from_filename(filename) - filename = _canonicalize_path(filename) - base = os.path.splitext(filename)[0] - - if extrapath is not None: - for path_ in map(_canonicalize_path, extrapath): - path = abspath(path_) - if path and normcase(base[:len(path)]) == normcase(path): - submodpath = [pkg for pkg in base[len(path):].split(os.sep) - if pkg] - if _check_init(path, submodpath[:-1]): - return extrapath[path_].split('.') + submodpath - - for path in map(_canonicalize_path, sys.path): - if path and normcase(base).startswith(path): - modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] - if _check_init(path, modpath[:-1]): - return modpath - - raise ImportError('Unable to find module for %s in %s' % ( - filename, ', \n'.join(sys.path))) - - -def file_from_modpath(modpath, path=None, context_file=None): - """given a mod path (i.e. splitted module / package name), return the - corresponding file, giving priority to source file over precompiled - file if it exists - - :type modpath: list or tuple - :param modpath: - splitted module's name (i.e name of a module or package splitted - on '.') - (this means explicit relative imports that start with dots have - empty strings in this list!) - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - :type context_file: str or None - :param context_file: - context file to consider, necessary if the identifier has been - introduced using a relative import unresolvable in the actual - context (i.e. 
modutils) - - :raise ImportError: if there is no such module in the directory - - :rtype: str or None - :return: - the path to the module's file or None if it's an integrated - builtin module such as 'sys' - """ - if context_file is not None: - context = dirname(context_file) - else: - context = context_file - if modpath[0] == 'xml': - # handle _xmlplus - try: - return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context) - except ImportError: - return _file_from_modpath(modpath, path, context) - elif modpath == ['os', 'path']: - # FIXME: currently ignoring search_path... - return os.path.__file__ - return _file_from_modpath(modpath, path, context) - - - -def get_module_part(dotted_name, context_file=None): - """given a dotted name return the module part of the name : - - >>> get_module_part('logilab.common.modutils.get_module_part') - 'logilab.common.modutils' - - :type dotted_name: str - :param dotted_name: full name of the identifier we are interested in - - :type context_file: str or None - :param context_file: - context file to consider, necessary if the identifier has been - introduced using a relative import unresolvable in the actual - context (i.e. modutils) - - - :raise ImportError: if there is no such module in the directory - - :rtype: str or None - :return: - the module part of the name or None if we have not been able at - all to import the given name - - XXX: deprecated, since it doesn't handle package precedence over module - (see #10066) - """ - # os.path trick - if dotted_name.startswith('os.path'): - return 'os.path' - parts = dotted_name.split('.') - if context_file is not None: - # first check for builtin module which won't be considered latter - # in that case (path != None) - if parts[0] in BUILTIN_MODULES: - if len(parts) > 2: - raise ImportError(dotted_name) - return parts[0] - # don't use += or insert, we want a new list to be created ! - path = None - starti = 0 - if parts[0] == '': - assert context_file is not None, \ - 'explicit relative import, but no context_file?' 
- path = [] # prevent resolving the import non-relatively - starti = 1 - while parts[starti] == '': # for all further dots: change context - starti += 1 - context_file = dirname(context_file) - for i in range(starti, len(parts)): - try: - file_from_modpath(parts[starti:i+1], - path=path, context_file=context_file) - except ImportError: - if not i >= max(1, len(parts) - 2): - raise - return '.'.join(parts[:i]) - return dotted_name - - -def get_modules(package, src_directory, blacklist=STD_BLACKLIST): - """given a package directory return a list of all available python - modules in the package and its subpackages - - :type package: str - :param package: the python name for the package - - :type src_directory: str - :param src_directory: - path of the directory corresponding to the package - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to - the value of `logilab.common.STD_BLACKLIST` - - :rtype: list - :return: - the list of all available python modules in the package and its - subpackages - """ - modules = [] - for directory, dirnames, filenames in os.walk(src_directory): - _handle_blacklist(blacklist, dirnames, filenames) - # check for __init__.py - if not '__init__.py' in filenames: - dirnames[:] = () - continue - if directory != src_directory: - dir_package = directory[len(src_directory):].replace(os.sep, '.') - modules.append(package + dir_package) - for filename in filenames: - if _is_python_file(filename) and filename != '__init__.py': - src = join(directory, filename) - module = package + src[len(src_directory):-3] - modules.append(module.replace(os.sep, '.')) - return modules - - - -def get_module_files(src_directory, blacklist=STD_BLACKLIST): - """given a package directory return a list of all available python - module's files in the package and its subpackages - - :type src_directory: str - :param src_directory: - path of the directory corresponding to the package - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` - - :rtype: list - :return: - the list of all available python module's files in the package and - its subpackages - """ - files = [] - for directory, dirnames, filenames in os.walk(src_directory): - _handle_blacklist(blacklist, dirnames, filenames) - # check for __init__.py - if not '__init__.py' in filenames: - dirnames[:] = () - continue - for filename in filenames: - if _is_python_file(filename): - src = join(directory, filename) - files.append(src) - return files - - -def get_source_file(filename, include_no_ext=False): - """given a python module's file name return the matching source file - name (the filename will be returned identically if it's a already an - absolute path to a python source file...) 
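[Editorial note] The directory walking done by get_modules()/get_module_files() above is covered on Python 3 by pkgutil; walk_packages recurses into subpackages where iter_modules lists direct children only. The target package below is just an example.

import pkgutil
import email

for info in pkgutil.iter_modules(email.__path__, prefix="email."):
    print(info.name)  # e.g. email.charset, email.mime, ...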
- - :type filename: str - :param filename: python module's file name - - - :raise NoSourceFile: if no source file exists on the file system - - :rtype: str - :return: the absolute path of the source file if it exists - """ - base, orig_ext = splitext(abspath(filename)) - for ext in PY_SOURCE_EXTS: - source_path = '%s.%s' % (base, ext) - if exists(source_path): - return source_path - if include_no_ext and not orig_ext and exists(base): - return base - raise NoSourceFile(filename) - - -def cleanup_sys_modules(directories): - """remove submodules of `directories` from `sys.modules`""" - cleaned = [] - for modname, module in list(sys.modules.items()): - modfile = getattr(module, '__file__', None) - if modfile: - for directory in directories: - if modfile.startswith(directory): - cleaned.append(modname) - del sys.modules[modname] - break - return cleaned - - -def clean_sys_modules(names): - """remove submodules starting with name from `names` from `sys.modules`""" - cleaned = set() - for modname in list(sys.modules): - for name in names: - if modname.startswith(name): - del sys.modules[modname] - cleaned.add(modname) - break - return cleaned - - -def is_python_source(filename): - """ - rtype: bool - return: True if the filename is a python source file - """ - return splitext(filename)[1][1:] in PY_SOURCE_EXTS - - -def is_standard_module(modname, std_path=(STD_LIB_DIR,)): - """try to guess if a module is a standard python module (by default, - see `std_path` parameter's description) - - :type modname: str - :param modname: name of the module we are interested in - - :type std_path: list(str) or tuple(str) - :param std_path: list of path considered as standard - - - :rtype: bool - :return: - true if the module: - - is located on the path listed in one of the directory in `std_path` - - is a built-in module - - Note: this function is known to return wrong values when inside virtualenv. - See https://www.logilab.org/ticket/294756. - """ - modname = modname.split('.')[0] - try: - filename = file_from_modpath([modname]) - except ImportError as ex: - # import failed, i'm probably not so wrong by supposing it's - # not standard... - return False - # modules which are not living in a file are considered standard - # (sys and __builtin__ for instance) - if filename is None: - # we assume there are no namespaces in stdlib - return not _is_namespace(modname) - filename = abspath(filename) - if filename.startswith(EXT_LIB_DIR): - return False - for path in std_path: - if filename.startswith(abspath(path)): - return True - return False - - - -def is_relative(modname, from_file): - """return true if the given module name is relative to the given - file name - - :type modname: str - :param modname: name of the module we are interested in - - :type from_file: str - :param from_file: - path of the module from which modname has been imported - - :rtype: bool - :return: - true if the module has been imported relatively to `from_file` - """ - if not isdir(from_file): - from_file = dirname(from_file) - if from_file in sys.path: - return False - try: - find_module(modname.split('.')[0], [from_file]) - return True - except ImportError: - return False - - -# internal only functions ##################################################### - -def _file_from_modpath(modpath, path=None, context=None): - """given a mod path (i.e. 
splitted module / package name), return the - corresponding file - - this function is used internally, see `file_from_modpath`'s - documentation for more information - """ - assert len(modpath) > 0 - if context is not None: - try: - mtype, mp_filename = _module_file(modpath, [context]) - except ImportError: - mtype, mp_filename = _module_file(modpath, path) - else: - mtype, mp_filename = _module_file(modpath, path) - if mtype == PY_COMPILED: - try: - return get_source_file(mp_filename) - except NoSourceFile: - return mp_filename - elif mtype == C_BUILTIN: - # integrated builtin module - return None - elif mtype == PKG_DIRECTORY: - mp_filename = _has_init(mp_filename) - return mp_filename - -def _search_zip(modpath, pic): - for filepath, importer in pic.items(): - if importer is not None: - if importer.find_module(modpath[0]): - if not importer.find_module('/'.join(modpath)): - raise ImportError('No module named %s in %s/%s' % ( - '.'.join(modpath[1:]), filepath, modpath)) - return ZIPFILE, abspath(filepath) + '/' + '/'.join(modpath), filepath - raise ImportError('No module named %s' % '.'.join(modpath)) - -try: - import pkg_resources -except ImportError: - pkg_resources = None - - -def _is_namespace(modname): - return (pkg_resources is not None - and modname in pkg_resources._namespace_packages) - - -def _module_file(modpath, path=None): - """get a module type / file path - - :type modpath: list or tuple - :param modpath: - splitted module's name (i.e name of a module or package splitted - on '.'), with leading empty strings for explicit relative import - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - - :rtype: tuple(int, str) - :return: the module type flag and the file path for a module - """ - # egg support compat - try: - pic = sys.path_importer_cache - _path = (path is None and sys.path or path) - for __path in _path: - if not __path in pic: - try: - pic[__path] = zipimport.zipimporter(__path) - except zipimport.ZipImportError: - pic[__path] = None - checkeggs = True - except AttributeError: - checkeggs = False - # pkg_resources support (aka setuptools namespace packages) - if (_is_namespace(modpath[0]) and modpath[0] in sys.modules): - # setuptools has added into sys.modules a module object with proper - # __path__, get back information from there - module = sys.modules[modpath.pop(0)] - # use list() to protect against _NamespacePath instance we get with python 3, which - # find_module later doesn't like - path = list(module.__path__) - if not modpath: - return C_BUILTIN, None - imported = [] - while modpath: - modname = modpath[0] - # take care to changes in find_module implementation wrt builtin modules - # - # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23) - # >>> imp.find_module('posix') - # (None, 'posix', ('', '', 6)) - # - # Python 3.3.1 (default, Apr 26 2013, 12:08:46) - # >>> imp.find_module('posix') - # (None, None, ('', '', 6)) - try: - _, mp_filename, mp_desc = find_module(modname, path) - except ImportError: - if checkeggs: - return _search_zip(modpath, pic)[:2] - raise - else: - if checkeggs and mp_filename: - fullabspath = [abspath(x) for x in _path] - try: - pathindex = fullabspath.index(dirname(abspath(mp_filename))) - emtype, emp_filename, zippath = _search_zip(modpath, pic) - if pathindex > _path.index(zippath): - # an egg takes priority - return emtype, emp_filename - except ValueError: - # XXX not in _path - pass - except ImportError: - pass - 
checkeggs = False - imported.append(modpath.pop(0)) - mtype = mp_desc[2] - if modpath: - if mtype != PKG_DIRECTORY: - raise ImportError('No module %s in %s' % ('.'.join(modpath), - '.'.join(imported))) - # XXX guess if package is using pkgutil.extend_path by looking for - # those keywords in the first four Kbytes - try: - with open(join(mp_filename, '__init__.py')) as stream: - data = stream.read(4096) - except IOError: - path = [mp_filename] - else: - if 'pkgutil' in data and 'extend_path' in data: - # extend_path is called, search sys.path for module/packages - # of this name see pkgutil.extend_path documentation - path = [join(p, *imported) for p in sys.path - if isdir(join(p, *imported))] - else: - path = [mp_filename] - return mtype, mp_filename - -def _is_python_file(filename): - """return true if the given filename should be considered as a python file - - .pyc and .pyo are ignored - """ - for ext in ('.py', '.so', '.pyd', '.pyw'): - if filename.endswith(ext): - return True - return False - - -def _has_init(directory): - """if the given directory has a valid __init__ file, return its path, - else return None - """ - mod_or_pack = join(directory, '__init__') - for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'): - if exists(mod_or_pack + '.' + ext): - return mod_or_pack + '.' + ext - return None diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py deleted file mode 100644 index 95489c28..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/optik_ext.py +++ /dev/null @@ -1,394 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Add an abstraction level to transparently import optik classes from optparse -(python >= 2.3) or the optik package. 
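[Editorial note] optik_ext extends optparse by subclassing Option, appending new type names to TYPES and mapping them to checker functions in TYPE_CHECKER, which is the pattern shown further down in this file. A compact, runnable sketch of that mechanism with an invented "yn" option:

from copy import copy
from optparse import Option, OptionParser, OptionValueError

def check_yn(option, opt, value):
    if value in ("y", "yes"):
        return True
    if value in ("n", "no"):
        return False
    raise OptionValueError("option %s: invalid yn value %r" % (opt, value))

class YNOption(Option):
    TYPES = Option.TYPES + ("yn",)
    TYPE_CHECKER = copy(Option.TYPE_CHECKER)
    TYPE_CHECKER["yn"] = check_yn

parser = OptionParser(option_class=YNOption)
parser.add_option("--colour", type="yn", default=True)
opts, _ = parser.parse_args(["--colour", "no"])
print(opts.colour)  # False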
- -It also defines three new types for optik/optparse command line parser : - - * regexp - argument of this type will be converted using re.compile - * csv - argument of this type will be converted using split(',') - * yn - argument of this type will be true if 'y' or 'yes', false if 'n' or 'no' - * named - argument of this type are in the form = or : - * password - argument of this type wont be converted but this is used by other tools - such as interactive prompt for configuration to double check value and - use an invisible field - * multiple_choice - same as default "choice" type but multiple choices allowed - * file - argument of this type wont be converted but checked that the given file exists - * color - argument of this type wont be converted but checked its either a - named color or a color specified using hexadecimal notation (preceded by a #) - * time - argument of this type will be converted to a float value in seconds - according to time units (ms, s, min, h, d) - * bytes - argument of this type will be converted to a float value in bytes - according to byte units (b, kb, mb, gb, tb) -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import re -import sys -import time -from copy import copy -from os.path import exists - -from six import integer_types - -# python >= 2.3 -from optparse import OptionParser as BaseParser, Option as BaseOption, \ - OptionGroup, OptionContainer, OptionValueError, OptionError, \ - Values, HelpFormatter, NO_DEFAULT, SUPPRESS_HELP - -try: - from mx import DateTime - HAS_MX_DATETIME = True -except ImportError: - HAS_MX_DATETIME = False - -from logilab.common.textutils import splitstrip, TIME_UNITS, BYTE_UNITS, \ - apply_units - - -def check_regexp(option, opt, value): - """check a regexp value by trying to compile it - return the compiled regexp - """ - if hasattr(value, 'pattern'): - return value - try: - return re.compile(value) - except ValueError: - raise OptionValueError( - "option %s: invalid regexp value: %r" % (opt, value)) - -def check_csv(option, opt, value): - """check a csv value by trying to split it - return the list of separated values - """ - if isinstance(value, (list, tuple)): - return value - try: - return splitstrip(value) - except ValueError: - raise OptionValueError( - "option %s: invalid csv value: %r" % (opt, value)) - -def check_yn(option, opt, value): - """check a yn value - return true for yes and false for no - """ - if isinstance(value, int): - return bool(value) - if value in ('y', 'yes'): - return True - if value in ('n', 'no'): - return False - msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)" - raise OptionValueError(msg % (opt, value)) - -def check_named(option, opt, value): - """check a named value - return a dictionary containing (name, value) associations - """ - if isinstance(value, dict): - return value - values = [] - for value in check_csv(option, opt, value): - if value.find('=') != -1: - values.append(value.split('=', 1)) - elif value.find(':') != -1: - values.append(value.split(':', 1)) - if values: - return dict(values) - msg = "option %s: invalid named value %r, should be = or \ -:" - raise OptionValueError(msg % (opt, value)) - -def check_password(option, opt, value): - """check a password value (can't be empty) - """ - # no actual checking, monkey patch if you want more - return value - -def check_file(option, opt, value): - """check a file value - return the filepath - """ - if exists(value): - return value - msg = "option %s: file %r does not 
exist" - raise OptionValueError(msg % (opt, value)) - -# XXX use python datetime -def check_date(option, opt, value): - """check a file value - return the filepath - """ - try: - return DateTime.strptime(value, "%Y/%m/%d") - except DateTime.Error : - raise OptionValueError( - "expected format of %s is yyyy/mm/dd" % opt) - -def check_color(option, opt, value): - """check a color value and returns it - /!\ does *not* check color labels (like 'red', 'green'), only - checks hexadecimal forms - """ - # Case (1) : color label, we trust the end-user - if re.match('[a-z0-9 ]+$', value, re.I): - return value - # Case (2) : only accepts hexadecimal forms - if re.match('#[a-f0-9]{6}', value, re.I): - return value - # Else : not a color label neither a valid hexadecimal form => error - msg = "option %s: invalid color : %r, should be either hexadecimal \ - value or predefined color" - raise OptionValueError(msg % (opt, value)) - -def check_time(option, opt, value): - if isinstance(value, integer_types + (float,)): - return value - return apply_units(value, TIME_UNITS) - -def check_bytes(option, opt, value): - if hasattr(value, '__int__'): - return value - return apply_units(value, BYTE_UNITS, final=int) - - -class Option(BaseOption): - """override optik.Option to add some new option types - """ - TYPES = BaseOption.TYPES + ('regexp', 'csv', 'yn', 'named', 'password', - 'multiple_choice', 'file', 'color', - 'time', 'bytes') - ATTRS = BaseOption.ATTRS + ['hide', 'level'] - TYPE_CHECKER = copy(BaseOption.TYPE_CHECKER) - TYPE_CHECKER['regexp'] = check_regexp - TYPE_CHECKER['csv'] = check_csv - TYPE_CHECKER['yn'] = check_yn - TYPE_CHECKER['named'] = check_named - TYPE_CHECKER['multiple_choice'] = check_csv - TYPE_CHECKER['file'] = check_file - TYPE_CHECKER['color'] = check_color - TYPE_CHECKER['password'] = check_password - TYPE_CHECKER['time'] = check_time - TYPE_CHECKER['bytes'] = check_bytes - if HAS_MX_DATETIME: - TYPES += ('date',) - TYPE_CHECKER['date'] = check_date - - def __init__(self, *opts, **attrs): - BaseOption.__init__(self, *opts, **attrs) - if hasattr(self, "hide") and self.hide: - self.help = SUPPRESS_HELP - - def _check_choice(self): - """FIXME: need to override this due to optik misdesign""" - if self.type in ("choice", "multiple_choice"): - if self.choices is None: - raise OptionError( - "must supply a list of choices for type 'choice'", self) - elif not isinstance(self.choices, (tuple, list)): - raise OptionError( - "choices must be a list of strings ('%s' supplied)" - % str(type(self.choices)).split("'")[1], self) - elif self.choices is not None: - raise OptionError( - "must not supply choices for type %r" % self.type, self) - BaseOption.CHECK_METHODS[2] = _check_choice - - - def process(self, opt, value, values, parser): - # First, convert the value(s) to the right type. Howl if any - # value(s) are bogus. - value = self.convert_value(opt, value) - if self.type == 'named': - existant = getattr(values, self.dest) - if existant: - existant.update(value) - value = existant - # And then take whatever action is expected of us. - # This is a separate method to make life easier for - # subclasses to add new actions. 
- return self.take_action( - self.action, self.dest, opt, value, values, parser) - - -class OptionParser(BaseParser): - """override optik.OptionParser to use our Option class - """ - def __init__(self, option_class=Option, *args, **kwargs): - BaseParser.__init__(self, option_class=Option, *args, **kwargs) - - def format_option_help(self, formatter=None): - if formatter is None: - formatter = self.formatter - outputlevel = getattr(formatter, 'output_level', 0) - formatter.store_option_strings(self) - result = [] - result.append(formatter.format_heading("Options")) - formatter.indent() - if self.option_list: - result.append(OptionContainer.format_option_help(self, formatter)) - result.append("\n") - for group in self.option_groups: - if group.level <= outputlevel and ( - group.description or level_options(group, outputlevel)): - result.append(group.format_help(formatter)) - result.append("\n") - formatter.dedent() - # Drop the last "\n", or the header if no options or option groups: - return "".join(result[:-1]) - - -OptionGroup.level = 0 - -def level_options(group, outputlevel): - return [option for option in group.option_list - if (getattr(option, 'level', 0) or 0) <= outputlevel - and not option.help is SUPPRESS_HELP] - -def format_option_help(self, formatter): - result = [] - outputlevel = getattr(formatter, 'output_level', 0) or 0 - for option in level_options(self, outputlevel): - result.append(formatter.format_option(option)) - return "".join(result) -OptionContainer.format_option_help = format_option_help - - -class ManHelpFormatter(HelpFormatter): - """Format help using man pages ROFF format""" - - def __init__ (self, - indent_increment=0, - max_help_position=24, - width=79, - short_first=0): - HelpFormatter.__init__ ( - self, indent_increment, max_help_position, width, short_first) - - def format_heading(self, heading): - return '.SH %s\n' % heading.upper() - - def format_description(self, description): - return description - - def format_option(self, option): - try: - optstring = option.option_strings - except AttributeError: - optstring = self.format_option_strings(option) - if option.help: - help_text = self.expand_default(option) - help = ' '.join([l.strip() for l in help_text.splitlines()]) - else: - help = '' - return '''.IP "%s" -%s -''' % (optstring, help) - - def format_head(self, optparser, pkginfo, section=1): - long_desc = "" - try: - pgm = optparser._get_prog_name() - except AttributeError: - # py >= 2.4.X (dunno which X exactly, at least 2) - pgm = optparser.get_prog_name() - short_desc = self.format_short_description(pgm, pkginfo.description) - if hasattr(pkginfo, "long_desc"): - long_desc = self.format_long_description(pgm, pkginfo.long_desc) - return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section), - short_desc, self.format_synopsis(pgm), - long_desc) - - def format_title(self, pgm, section): - date = '-'.join([str(num) for num in time.localtime()[:3]]) - return '.TH %s %s "%s" %s' % (pgm, section, date, pgm) - - def format_short_description(self, pgm, short_desc): - return '''.SH NAME -.B %s -\- %s -''' % (pgm, short_desc.strip()) - - def format_synopsis(self, pgm): - return '''.SH SYNOPSIS -.B %s -[ -.I OPTIONS -] [ -.I -] -''' % pgm - - def format_long_description(self, pgm, long_desc): - long_desc = '\n'.join([line.lstrip() - for line in long_desc.splitlines()]) - long_desc = long_desc.replace('\n.\n', '\n\n') - if long_desc.lower().startswith(pgm): - long_desc = long_desc[len(pgm):] - return '''.SH DESCRIPTION -.B %s -%s -''' % (pgm, long_desc.strip()) - - 
def format_tail(self, pkginfo): - tail = '''.SH SEE ALSO -/usr/share/doc/pythonX.Y-%s/ - -.SH BUGS -Please report bugs on the project\'s mailing list: -%s - -.SH AUTHOR -%s <%s> -''' % (getattr(pkginfo, 'debian_name', pkginfo.modname), - pkginfo.mailinglist, pkginfo.author, pkginfo.author_email) - - if hasattr(pkginfo, "copyright"): - tail += ''' -.SH COPYRIGHT -%s -''' % pkginfo.copyright - - return tail - -def generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0): - """generate a man page from an optik parser""" - formatter = ManHelpFormatter() - formatter.output_level = level - formatter.parser = optparser - print(formatter.format_head(optparser, pkginfo, section), file=stream) - print(optparser.format_option_help(formatter), file=stream) - print(formatter.format_tail(pkginfo), file=stream) - - -__all__ = ('OptionParser', 'Option', 'OptionGroup', 'OptionValueError', - 'Values') diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py b/pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py deleted file mode 100644 index aa17750e..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/optparser.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Extend OptionParser with commands. - -Example: - ->>> parser = OptionParser() ->>> parser.usage = '%prog COMMAND [options] ...' 
->>> parser.add_command('build', 'mymod.build') ->>> parser.add_command('clean', run_clean, add_opt_clean) ->>> run, options, args = parser.parse_command(sys.argv[1:]) ->>> return run(options, args[1:]) - -With mymod.build that defines two functions run and add_options -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from warnings import warn -warn('lgc.optparser module is deprecated, use lgc.clcommands instead', DeprecationWarning, - stacklevel=2) - -import sys -import optparse - -class OptionParser(optparse.OptionParser): - - def __init__(self, *args, **kwargs): - optparse.OptionParser.__init__(self, *args, **kwargs) - self._commands = {} - self.min_args, self.max_args = 0, 1 - - def add_command(self, name, mod_or_funcs, help=''): - """name of the command, name of module or tuple of functions - (run, add_options) - """ - assert isinstance(mod_or_funcs, str) or isinstance(mod_or_funcs, tuple), \ - "mod_or_funcs has to be a module name or a tuple of functions" - self._commands[name] = (mod_or_funcs, help) - - def print_main_help(self): - optparse.OptionParser.print_help(self) - print('\ncommands:') - for cmdname, (_, help) in self._commands.items(): - print('% 10s - %s' % (cmdname, help)) - - def parse_command(self, args): - if len(args) == 0: - self.print_main_help() - sys.exit(1) - cmd = args[0] - args = args[1:] - if cmd not in self._commands: - if cmd in ('-h', '--help'): - self.print_main_help() - sys.exit(0) - elif self.version is not None and cmd == "--version": - self.print_version() - sys.exit(0) - self.error('unknown command') - self.prog = '%s %s' % (self.prog, cmd) - mod_or_f, help = self._commands[cmd] - # optparse inserts self.description between usage and options help - self.description = help - if isinstance(mod_or_f, str): - exec('from %s import run, add_options' % mod_or_f) - else: - run, add_options = mod_or_f - add_options(self) - (options, args) = self.parse_args(args) - if not (self.min_args <= len(args) <= self.max_args): - self.error('incorrect number of arguments') - return run, options, args - - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/proc.py b/pymode/libs/logilab-common-1.4.1/logilab/common/proc.py deleted file mode 100644 index c27356c6..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/proc.py +++ /dev/null @@ -1,277 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""module providing: -* process information (linux specific: rely on /proc) -* a class for resource control (memory / time / cpu time) - -This module doesn't work on windows platforms (only tested on linux) - -:organization: Logilab - - - -""" -__docformat__ = "restructuredtext en" - -import os -import stat -from resource import getrlimit, setrlimit, RLIMIT_CPU, RLIMIT_AS -from signal import signal, SIGXCPU, SIGKILL, SIGUSR2, SIGUSR1 -from threading import Timer, currentThread, Thread, Event -from time import time - -from logilab.common.tree import Node - -class NoSuchProcess(Exception): pass - -def proc_exists(pid): - """check the a pid is registered in /proc - raise NoSuchProcess exception if not - """ - if not os.path.exists('/proc/%s' % pid): - raise NoSuchProcess() - -PPID = 3 -UTIME = 13 -STIME = 14 -CUTIME = 15 -CSTIME = 16 -VSIZE = 22 - -class ProcInfo(Node): - """provide access to process information found in /proc""" - - def __init__(self, pid): - self.pid = int(pid) - Node.__init__(self, self.pid) - proc_exists(self.pid) - self.file = '/proc/%s/stat' % self.pid - self.ppid = int(self.status()[PPID]) - - def memory_usage(self): - """return the memory usage of the process in Ko""" - try : - return int(self.status()[VSIZE]) - except IOError: - return 0 - - def lineage_memory_usage(self): - return self.memory_usage() + sum([child.lineage_memory_usage() - for child in self.children]) - - def time(self, children=0): - """return the number of jiffies that this process has been scheduled - in user and kernel mode""" - status = self.status() - time = int(status[UTIME]) + int(status[STIME]) - if children: - time += int(status[CUTIME]) + int(status[CSTIME]) - return time - - def status(self): - """return the list of fields found in /proc//stat""" - return open(self.file).read().split() - - def name(self): - """return the process name found in /proc//stat - """ - return self.status()[1].strip('()') - - def age(self): - """return the age of the process - """ - return os.stat(self.file)[stat.ST_MTIME] - -class ProcInfoLoader: - """manage process information""" - - def __init__(self): - self._loaded = {} - - def list_pids(self): - """return a list of existent process ids""" - for subdir in os.listdir('/proc'): - if subdir.isdigit(): - yield int(subdir) - - def load(self, pid): - """get a ProcInfo object for a given pid""" - pid = int(pid) - try: - return self._loaded[pid] - except KeyError: - procinfo = ProcInfo(pid) - procinfo.manager = self - self._loaded[pid] = procinfo - return procinfo - - - def load_all(self): - """load all processes information""" - for pid in self.list_pids(): - try: - procinfo = self.load(pid) - if procinfo.parent is None and procinfo.ppid: - pprocinfo = self.load(procinfo.ppid) - pprocinfo.append(procinfo) - except NoSuchProcess: - pass - - -try: - class ResourceError(BaseException): - """Error raise when resource limit is reached""" - limit = "Unknown Resource Limit" -except NameError: - class ResourceError(Exception): - """Error raise when resource limit is reached""" - limit = "Unknown Resource Limit" - - -class XCPUError(ResourceError): - """Error raised when CPU Time limit is reached""" - limit = "CPU Time" - -class LineageMemoryError(ResourceError): - """Error raised when the total amount of memory used by a process and - it's child is reached""" - limit = "Lineage total Memory" - -class TimeoutError(ResourceError): - """Error raised when the process is running for to much time""" - limit = "Real Time" - -# Can't use subclass because the StandardError 
MemoryError raised -RESOURCE_LIMIT_EXCEPTION = (ResourceError, MemoryError) - - -class MemorySentinel(Thread): - """A class checking a process don't use too much memory in a separated - daemonic thread - """ - def __init__(self, interval, memory_limit, gpid=os.getpid()): - Thread.__init__(self, target=self._run, name="Test.Sentinel") - self.memory_limit = memory_limit - self._stop = Event() - self.interval = interval - self.setDaemon(True) - self.gpid = gpid - - def stop(self): - """stop ap""" - self._stop.set() - - def _run(self): - pil = ProcInfoLoader() - while not self._stop.isSet(): - if self.memory_limit <= pil.load(self.gpid).lineage_memory_usage(): - os.killpg(self.gpid, SIGUSR1) - self._stop.wait(self.interval) - - -class ResourceController: - - def __init__(self, max_cpu_time=None, max_time=None, max_memory=None, - max_reprieve=60): - if SIGXCPU == -1: - raise RuntimeError("Unsupported platform") - self.max_time = max_time - self.max_memory = max_memory - self.max_cpu_time = max_cpu_time - self._reprieve = max_reprieve - self._timer = None - self._msentinel = None - self._old_max_memory = None - self._old_usr1_hdlr = None - self._old_max_cpu_time = None - self._old_usr2_hdlr = None - self._old_sigxcpu_hdlr = None - self._limit_set = 0 - self._abort_try = 0 - self._start_time = None - self._elapse_time = 0 - - def _hangle_sig_timeout(self, sig, frame): - raise TimeoutError() - - def _hangle_sig_memory(self, sig, frame): - if self._abort_try < self._reprieve: - self._abort_try += 1 - raise LineageMemoryError("Memory limit reached") - else: - os.killpg(os.getpid(), SIGKILL) - - def _handle_sigxcpu(self, sig, frame): - if self._abort_try < self._reprieve: - self._abort_try += 1 - raise XCPUError("Soft CPU time limit reached") - else: - os.killpg(os.getpid(), SIGKILL) - - def _time_out(self): - if self._abort_try < self._reprieve: - self._abort_try += 1 - os.killpg(os.getpid(), SIGUSR2) - if self._limit_set > 0: - self._timer = Timer(1, self._time_out) - self._timer.start() - else: - os.killpg(os.getpid(), SIGKILL) - - def setup_limit(self): - """set up the process limit""" - assert currentThread().getName() == 'MainThread' - os.setpgrp() - if self._limit_set <= 0: - if self.max_time is not None: - self._old_usr2_hdlr = signal(SIGUSR2, self._hangle_sig_timeout) - self._timer = Timer(max(1, int(self.max_time) - self._elapse_time), - self._time_out) - self._start_time = int(time()) - self._timer.start() - if self.max_cpu_time is not None: - self._old_max_cpu_time = getrlimit(RLIMIT_CPU) - cpu_limit = (int(self.max_cpu_time), self._old_max_cpu_time[1]) - self._old_sigxcpu_hdlr = signal(SIGXCPU, self._handle_sigxcpu) - setrlimit(RLIMIT_CPU, cpu_limit) - if self.max_memory is not None: - self._msentinel = MemorySentinel(1, int(self.max_memory) ) - self._old_max_memory = getrlimit(RLIMIT_AS) - self._old_usr1_hdlr = signal(SIGUSR1, self._hangle_sig_memory) - as_limit = (int(self.max_memory), self._old_max_memory[1]) - setrlimit(RLIMIT_AS, as_limit) - self._msentinel.start() - self._limit_set += 1 - - def clean_limit(self): - """reinstall the old process limit""" - if self._limit_set > 0: - if self.max_time is not None: - self._timer.cancel() - self._elapse_time += int(time())-self._start_time - self._timer = None - signal(SIGUSR2, self._old_usr2_hdlr) - if self.max_cpu_time is not None: - setrlimit(RLIMIT_CPU, self._old_max_cpu_time) - signal(SIGXCPU, self._old_sigxcpu_hdlr) - if self.max_memory is not None: - self._msentinel.stop() - self._msentinel = None - setrlimit(RLIMIT_AS, 
self._old_max_memory) - signal(SIGUSR1, self._old_usr1_hdlr) - self._limit_set -= 1 diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py b/pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py deleted file mode 100644 index c644a61f..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/pytest.py +++ /dev/null @@ -1,1304 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""logilab-pytest is a tool that eases test running and debugging. - -To be able to use logilab-pytest, you should either write tests using -the logilab.common.testlib's framework or the unittest module of the -Python's standard library. - -You can customize logilab-pytest's behaviour by defining a ``pytestconf.py`` -file somewhere in your test directory. In this file, you can add options or -change the way tests are run. - -To add command line options, you must define a ``update_parser`` function in -your ``pytestconf.py`` file. The function must accept a single parameter -that will be the OptionParser's instance to customize. - -If you wish to customize the tester, you'll have to define a class named -``CustomPyTester``. This class should extend the default `PyTester` class -defined in the logilab.common.pytest module. Take a look at the `PyTester` and -`DjangoTester` classes for more information about what can be done. - -For instance, if you wish to add a custom -l option to specify a loglevel, you -could define the following ``pytestconf.py`` file :: - - import logging - from logilab.common.pytest import PyTester - - def update_parser(parser): - parser.add_option('-l', '--loglevel', dest='loglevel', action='store', - choices=('debug', 'info', 'warning', 'error', 'critical'), - default='critical', help="the default log level possible choices are " - "('debug', 'info', 'warning', 'error', 'critical')") - return parser - - - class CustomPyTester(PyTester): - def __init__(self, cvg, options): - super(CustomPyTester, self).__init__(cvg, options) - loglevel = options.loglevel.upper() - logger = logging.getLogger('erudi') - logger.setLevel(logging.getLevelName(loglevel)) - - -In your TestCase class you can then get the value of a specific option with -the ``optval`` method:: - - class MyTestCase(TestCase): - def test_foo(self): - loglevel = self.optval('loglevel') - # ... 
- - -You can also tag your tag your test for fine filtering - -With those tag:: - - from logilab.common.testlib import tag, TestCase - - class Exemple(TestCase): - - @tag('rouge', 'carre') - def toto(self): - pass - - @tag('carre', 'vert') - def tata(self): - pass - - @tag('rouge') - def titi(test): - pass - -you can filter the function with a simple python expression - - * ``toto`` and ``titi`` match ``rouge`` - * ``toto``, ``tata`` and ``titi``, match ``rouge or carre`` - * ``tata`` and ``titi`` match``rouge ^ carre`` - * ``titi`` match ``rouge and not carre`` -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -PYTEST_DOC = """%prog [OPTIONS] [testfile [testpattern]] - -examples: - -logilab-pytest path/to/mytests.py -logilab-pytest path/to/mytests.py TheseTests -logilab-pytest path/to/mytests.py TheseTests.test_thisone -logilab-pytest path/to/mytests.py -m '(not long and database) or regr' - -logilab-pytest one (will run both test_thisone and test_thatone) -logilab-pytest path/to/mytests.py -s not (will skip test_notthisone) -""" - -ENABLE_DBC = False -FILE_RESTART = ".pytest.restart" - -import os, sys, re -import os.path as osp -from time import time, clock -import warnings -import types -import inspect -import traceback -from inspect import isgeneratorfunction, isclass -from random import shuffle -from itertools import dropwhile - -from logilab.common.deprecation import deprecated -from logilab.common.fileutils import abspath_listdir -from logilab.common import textutils -from logilab.common import testlib, STD_BLACKLIST -# use the same unittest module as testlib -from logilab.common.testlib import unittest, start_interactive_mode -from logilab.common.testlib import nocoverage, pause_trace, replace_trace # bwcompat -from logilab.common.debugger import Debugger, colorize_source -import doctest - -import unittest as unittest_legacy -if not getattr(unittest_legacy, "__package__", None): - try: - import unittest2.suite as unittest_suite - except ImportError: - sys.exit("You have to install python-unittest2 to use this module") -else: - import unittest.suite as unittest_suite - -try: - import django - from logilab.common.modutils import modpath_from_file, load_module_from_modpath - DJANGO_FOUND = True -except ImportError: - DJANGO_FOUND = False - -CONF_FILE = 'pytestconf.py' - -TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$") -def this_is_a_testfile(filename): - """returns True if `filename` seems to be a test file""" - return TESTFILE_RE.match(osp.basename(filename)) - -TESTDIR_RE = re.compile("^(unit)?tests?$") -def this_is_a_testdir(dirpath): - """returns True if `filename` seems to be a test directory""" - return TESTDIR_RE.match(osp.basename(dirpath)) - - -def load_pytest_conf(path, parser): - """loads a ``pytestconf.py`` file and update default parser - and / or tester. 
- """ - namespace = {} - exec(open(path, 'rb').read(), namespace) - if 'update_parser' in namespace: - namespace['update_parser'](parser) - return namespace.get('CustomPyTester', PyTester) - - -def project_root(parser, projdir=os.getcwd()): - """try to find project's root and add it to sys.path""" - previousdir = curdir = osp.abspath(projdir) - testercls = PyTester - conf_file_path = osp.join(curdir, CONF_FILE) - if osp.isfile(conf_file_path): - testercls = load_pytest_conf(conf_file_path, parser) - while this_is_a_testdir(curdir) or \ - osp.isfile(osp.join(curdir, '__init__.py')): - newdir = osp.normpath(osp.join(curdir, os.pardir)) - if newdir == curdir: - break - previousdir = curdir - curdir = newdir - conf_file_path = osp.join(curdir, CONF_FILE) - if osp.isfile(conf_file_path): - testercls = load_pytest_conf(conf_file_path, parser) - return previousdir, testercls - - -class GlobalTestReport(object): - """this class holds global test statistics""" - def __init__(self): - self.ran = 0 - self.skipped = 0 - self.failures = 0 - self.errors = 0 - self.ttime = 0 - self.ctime = 0 - self.modulescount = 0 - self.errmodules = [] - - def feed(self, filename, testresult, ttime, ctime): - """integrates new test information into internal statistics""" - ran = testresult.testsRun - self.ran += ran - self.skipped += len(getattr(testresult, 'skipped', ())) - self.failures += len(testresult.failures) - self.errors += len(testresult.errors) - self.ttime += ttime - self.ctime += ctime - self.modulescount += 1 - if not testresult.wasSuccessful(): - problems = len(testresult.failures) + len(testresult.errors) - self.errmodules.append((filename[:-3], problems, ran)) - - def failed_to_test_module(self, filename): - """called when the test module could not be imported by unittest - """ - self.errors += 1 - self.modulescount += 1 - self.ran += 1 - self.errmodules.append((filename[:-3], 1, 1)) - - def skip_module(self, filename): - self.modulescount += 1 - self.ran += 1 - self.errmodules.append((filename[:-3], 0, 0)) - - def __str__(self): - """this is just presentation stuff""" - line1 = ['Ran %s test cases in %.2fs (%.2fs CPU)' - % (self.ran, self.ttime, self.ctime)] - if self.errors: - line1.append('%s errors' % self.errors) - if self.failures: - line1.append('%s failures' % self.failures) - if self.skipped: - line1.append('%s skipped' % self.skipped) - modulesok = self.modulescount - len(self.errmodules) - if self.errors or self.failures: - line2 = '%s modules OK (%s failed)' % (modulesok, - len(self.errmodules)) - descr = ', '.join(['%s [%s/%s]' % info for info in self.errmodules]) - line3 = '\nfailures: %s' % descr - elif modulesok: - line2 = 'All %s modules OK' % modulesok - line3 = '' - else: - return '' - return '%s\n%s%s' % (', '.join(line1), line2, line3) - - - -def remove_local_modules_from_sys(testdir): - """remove all modules from cache that come from `testdir` - - This is used to avoid strange side-effects when using the - testall() mode of pytest. 
- For instance, if we run pytest on this tree:: - - A/test/test_utils.py - B/test/test_utils.py - - we **have** to clean sys.modules to make sure the correct test_utils - module is ran in B - """ - for modname, mod in list(sys.modules.items()): - if mod is None: - continue - if not hasattr(mod, '__file__'): - # this is the case of some built-in modules like sys, imp, marshal - continue - modfile = mod.__file__ - # if modfile is not an absolute path, it was probably loaded locally - # during the tests - if not osp.isabs(modfile) or modfile.startswith(testdir): - del sys.modules[modname] - - - -class PyTester(object): - """encapsulates testrun logic""" - - def __init__(self, cvg, options): - self.report = GlobalTestReport() - self.cvg = cvg - self.options = options - self.firstwrite = True - self._errcode = None - - def show_report(self): - """prints the report and returns appropriate exitcode""" - # everything has been ran, print report - print("*" * 79) - print(self.report) - - def get_errcode(self): - # errcode set explicitly - if self._errcode is not None: - return self._errcode - return self.report.failures + self.report.errors - - def set_errcode(self, errcode): - self._errcode = errcode - errcode = property(get_errcode, set_errcode) - - def testall(self, exitfirst=False): - """walks through current working directory, finds something - which can be considered as a testdir and runs every test there - """ - here = os.getcwd() - for dirname, dirs, _ in os.walk(here): - for skipped in STD_BLACKLIST: - if skipped in dirs: - dirs.remove(skipped) - basename = osp.basename(dirname) - if this_is_a_testdir(basename): - print("going into", dirname) - # we found a testdir, let's explore it ! - if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - if self.report.ran == 0: - print("no test dir found testing here:", here) - # if no test was found during the visit, consider - # the local directory as a test directory even if - # it doesn't have a traditional test directory name - self.testonedir(here) - - def testonedir(self, testdir, exitfirst=False): - """finds each testfile in the `testdir` and runs it - - return true when all tests has been executed, false if exitfirst and - some test has failed. 
- """ - files = abspath_listdir(testdir) - shuffle(files) - for filename in files: - if this_is_a_testfile(filename): - if self.options.exitfirst and not self.options.restart: - # overwrite restart file - try: - restartfile = open(FILE_RESTART, "w") - restartfile.close() - except Exception: - print("Error while overwriting succeeded test file :", - osp.join(os.getcwd(), FILE_RESTART), - file=sys.__stderr__) - raise - # run test and collect information - prog = self.testfile(filename, batchmode=True) - if exitfirst and (prog is None or not prog.result.wasSuccessful()): - return False - self.firstwrite = True - # clean local modules - remove_local_modules_from_sys(testdir) - return True - - def testfile(self, filename, batchmode=False): - """runs every test in `filename` - - :param filename: an absolute path pointing to a unittest file - """ - here = os.getcwd() - dirname = osp.dirname(filename) - if dirname: - os.chdir(dirname) - # overwrite restart file if it has not been done already - if self.options.exitfirst and not self.options.restart and self.firstwrite: - try: - restartfile = open(FILE_RESTART, "w") - restartfile.close() - except Exception: - print("Error while overwriting succeeded test file :", - osp.join(os.getcwd(), FILE_RESTART), file=sys.__stderr__) - raise - modname = osp.basename(filename)[:-3] - print((' %s ' % osp.basename(filename)).center(70, '='), - file=sys.__stderr__) - try: - tstart, cstart = time(), clock() - try: - testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg, - options=self.options, outstream=sys.stderr) - except KeyboardInterrupt: - raise - except SystemExit as exc: - self.errcode = exc.code - raise - except testlib.SkipTest: - print("Module skipped:", filename) - self.report.skip_module(filename) - return None - except Exception: - self.report.failed_to_test_module(filename) - print('unhandled exception occurred while testing', modname, - file=sys.stderr) - import traceback - traceback.print_exc(file=sys.stderr) - return None - - tend, cend = time(), clock() - ttime, ctime = (tend - tstart), (cend - cstart) - self.report.feed(filename, testprog.result, ttime, ctime) - return testprog - finally: - if dirname: - os.chdir(here) - - - -class DjangoTester(PyTester): - - def load_django_settings(self, dirname): - """try to find project's setting and load it""" - curdir = osp.abspath(dirname) - previousdir = curdir - while not osp.isfile(osp.join(curdir, 'settings.py')) and \ - osp.isfile(osp.join(curdir, '__init__.py')): - newdir = osp.normpath(osp.join(curdir, os.pardir)) - if newdir == curdir: - raise AssertionError('could not find settings.py') - previousdir = curdir - curdir = newdir - # late django initialization - settings = load_module_from_modpath(modpath_from_file(osp.join(curdir, 'settings.py'))) - from django.core.management import setup_environ - setup_environ(settings) - settings.DEBUG = False - self.settings = settings - # add settings dir to pythonpath since it's the project's root - if curdir not in sys.path: - sys.path.insert(1, curdir) - - def before_testfile(self): - # Those imports must be done **after** setup_environ was called - from django.test.utils import setup_test_environment - from django.test.utils import create_test_db - setup_test_environment() - create_test_db(verbosity=0) - self.dbname = self.settings.TEST_DATABASE_NAME - - def after_testfile(self): - # Those imports must be done **after** setup_environ was called - from django.test.utils import teardown_test_environment - from django.test.utils import 
destroy_test_db - teardown_test_environment() - print('destroying', self.dbname) - destroy_test_db(self.dbname, verbosity=0) - - def testall(self, exitfirst=False): - """walks through current working directory, finds something - which can be considered as a testdir and runs every test there - """ - for dirname, dirs, files in os.walk(os.getcwd()): - for skipped in ('CVS', '.svn', '.hg'): - if skipped in dirs: - dirs.remove(skipped) - if 'tests.py' in files: - if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - else: - basename = osp.basename(dirname) - if basename in ('test', 'tests'): - print("going into", dirname) - # we found a testdir, let's explore it ! - if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - - def testonedir(self, testdir, exitfirst=False): - """finds each testfile in the `testdir` and runs it - - return true when all tests has been executed, false if exitfirst and - some test has failed. - """ - # special django behaviour : if tests are splitted in several files, - # remove the main tests.py file and tests each test file separately - testfiles = [fpath for fpath in abspath_listdir(testdir) - if this_is_a_testfile(fpath)] - if len(testfiles) > 1: - try: - testfiles.remove(osp.join(testdir, 'tests.py')) - except ValueError: - pass - for filename in testfiles: - # run test and collect information - prog = self.testfile(filename, batchmode=True) - if exitfirst and (prog is None or not prog.result.wasSuccessful()): - return False - # clean local modules - remove_local_modules_from_sys(testdir) - return True - - def testfile(self, filename, batchmode=False): - """runs every test in `filename` - - :param filename: an absolute path pointing to a unittest file - """ - here = os.getcwd() - dirname = osp.dirname(filename) - if dirname: - os.chdir(dirname) - self.load_django_settings(dirname) - modname = osp.basename(filename)[:-3] - print((' %s ' % osp.basename(filename)).center(70, '='), - file=sys.stderr) - try: - try: - tstart, cstart = time(), clock() - self.before_testfile() - testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg) - tend, cend = time(), clock() - ttime, ctime = (tend - tstart), (cend - cstart) - self.report.feed(filename, testprog.result, ttime, ctime) - return testprog - except SystemExit: - raise - except Exception as exc: - import traceback - traceback.print_exc() - self.report.failed_to_test_module(filename) - print('unhandled exception occurred while testing', modname) - print('error: %s' % exc) - return None - finally: - self.after_testfile() - if dirname: - os.chdir(here) - - -def make_parser(): - """creates the OptionParser instance - """ - from optparse import OptionParser - parser = OptionParser(usage=PYTEST_DOC) - - parser.newargs = [] - def rebuild_cmdline(option, opt, value, parser): - """carry the option to unittest_main""" - parser.newargs.append(opt) - - def rebuild_and_store(option, opt, value, parser): - """carry the option to unittest_main and store - the value on current parser - """ - parser.newargs.append(opt) - setattr(parser.values, option.dest, True) - - def capture_and_rebuild(option, opt, value, parser): - warnings.simplefilter('ignore', DeprecationWarning) - rebuild_cmdline(option, opt, value, parser) - - # logilab-pytest options - parser.add_option('-t', dest='testdir', default=None, - help="directory where the tests will be found") - parser.add_option('-d', dest='dbc', default=False, - action="store_true", help="enable design-by-contract") - # unittest_main options provided 
and passed through logilab-pytest - parser.add_option('-v', '--verbose', callback=rebuild_cmdline, - action="callback", help="Verbose output") - parser.add_option('-i', '--pdb', callback=rebuild_and_store, - dest="pdb", action="callback", - help="Enable test failure inspection") - parser.add_option('-x', '--exitfirst', callback=rebuild_and_store, - dest="exitfirst", default=False, - action="callback", help="Exit on first failure " - "(only make sense when logilab-pytest run one test file)") - parser.add_option('-R', '--restart', callback=rebuild_and_store, - dest="restart", default=False, - action="callback", - help="Restart tests from where it failed (implies exitfirst) " - "(only make sense if tests previously ran with exitfirst only)") - parser.add_option('--color', callback=rebuild_cmdline, - action="callback", - help="colorize tracebacks") - parser.add_option('-s', '--skip', - # XXX: I wish I could use the callback action but it - # doesn't seem to be able to get the value - # associated to the option - action="store", dest="skipped", default=None, - help="test names matching this name will be skipped " - "to skip several patterns, use commas") - parser.add_option('-q', '--quiet', callback=rebuild_cmdline, - action="callback", help="Minimal output") - parser.add_option('-P', '--profile', default=None, dest='profile', - help="Profile execution and store data in the given file") - parser.add_option('-m', '--match', default=None, dest='tags_pattern', - help="only execute test whose tag match the current pattern") - - if DJANGO_FOUND: - parser.add_option('-J', '--django', dest='django', default=False, - action="store_true", - help='use logilab-pytest for django test cases') - return parser - - -def parseargs(parser): - """Parse the command line and return (options processed), (options to pass to - unittest_main()), (explicitfile or None). 
- """ - # parse the command line - options, args = parser.parse_args() - filenames = [arg for arg in args if arg.endswith('.py')] - if filenames: - if len(filenames) > 1: - parser.error("only one filename is acceptable") - explicitfile = filenames[0] - args.remove(explicitfile) - else: - explicitfile = None - # someone wants DBC - testlib.ENABLE_DBC = options.dbc - newargs = parser.newargs - if options.skipped: - newargs.extend(['--skip', options.skipped]) - # restart implies exitfirst - if options.restart: - options.exitfirst = True - # append additional args to the new sys.argv and let unittest_main - # do the rest - newargs += args - return options, explicitfile - - - -@deprecated('[logilab-common 1.3] logilab-pytest is deprecated, use another test runner') -def run(): - parser = make_parser() - rootdir, testercls = project_root(parser) - options, explicitfile = parseargs(parser) - # mock a new command line - sys.argv[1:] = parser.newargs - cvg = None - if not '' in sys.path: - sys.path.insert(0, '') - if DJANGO_FOUND and options.django: - tester = DjangoTester(cvg, options) - else: - tester = testercls(cvg, options) - if explicitfile: - cmd, args = tester.testfile, (explicitfile,) - elif options.testdir: - cmd, args = tester.testonedir, (options.testdir, options.exitfirst) - else: - cmd, args = tester.testall, (options.exitfirst,) - try: - try: - if options.profile: - import hotshot - prof = hotshot.Profile(options.profile) - prof.runcall(cmd, *args) - prof.close() - print('profile data saved in', options.profile) - else: - cmd(*args) - except SystemExit: - raise - except: - import traceback - traceback.print_exc() - finally: - tester.show_report() - sys.exit(tester.errcode) - -class SkipAwareTestProgram(unittest.TestProgram): - # XXX: don't try to stay close to unittest.py, use optparse - USAGE = """\ -Usage: %(progName)s [options] [test] [...] 
- -Options: - -h, --help Show this message - -v, --verbose Verbose output - -i, --pdb Enable test failure inspection - -x, --exitfirst Exit on first failure - -s, --skip skip test matching this pattern (no regexp for now) - -q, --quiet Minimal output - --color colorize tracebacks - - -m, --match Run only test whose tag match this pattern - - -P, --profile FILE: Run the tests using cProfile and saving results - in FILE - -Examples: - %(progName)s - run default set of tests - %(progName)s MyTestSuite - run suite 'MyTestSuite' - %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething - %(progName)s MyTestCase - run all 'test*' test methods - in MyTestCase -""" - def __init__(self, module='__main__', defaultTest=None, batchmode=False, - cvg=None, options=None, outstream=sys.stderr): - self.batchmode = batchmode - self.cvg = cvg - self.options = options - self.outstream = outstream - super(SkipAwareTestProgram, self).__init__( - module=module, defaultTest=defaultTest, - testLoader=NonStrictTestLoader()) - - def parseArgs(self, argv): - self.pdbmode = False - self.exitfirst = False - self.skipped_patterns = [] - self.test_pattern = None - self.tags_pattern = None - self.colorize = False - self.profile_name = None - import getopt - try: - options, args = getopt.getopt(argv[1:], 'hHvixrqcp:s:m:P:', - ['help', 'verbose', 'quiet', 'pdb', - 'exitfirst', 'restart', - 'skip=', 'color', 'match=', 'profile=']) - for opt, value in options: - if opt in ('-h', '-H', '--help'): - self.usageExit() - if opt in ('-i', '--pdb'): - self.pdbmode = True - if opt in ('-x', '--exitfirst'): - self.exitfirst = True - if opt in ('-r', '--restart'): - self.restart = True - self.exitfirst = True - if opt in ('-q', '--quiet'): - self.verbosity = 0 - if opt in ('-v', '--verbose'): - self.verbosity = 2 - if opt in ('-s', '--skip'): - self.skipped_patterns = [pat.strip() for pat in - value.split(', ')] - if opt == '--color': - self.colorize = True - if opt in ('-m', '--match'): - #self.tags_pattern = value - self.options["tag_pattern"] = value - if opt in ('-P', '--profile'): - self.profile_name = value - self.testLoader.skipped_patterns = self.skipped_patterns - if len(args) == 0 and self.defaultTest is None: - suitefunc = getattr(self.module, 'suite', None) - if isinstance(suitefunc, (types.FunctionType, - types.MethodType)): - self.test = self.module.suite() - else: - self.test = self.testLoader.loadTestsFromModule(self.module) - return - if len(args) > 0: - self.test_pattern = args[0] - self.testNames = args - else: - self.testNames = (self.defaultTest, ) - self.createTests() - except getopt.error as msg: - self.usageExit(msg) - - def runTests(self): - if self.profile_name: - import cProfile - cProfile.runctx('self._runTests()', globals(), locals(), self.profile_name ) - else: - return self._runTests() - - def _runTests(self): - self.testRunner = SkipAwareTextTestRunner(verbosity=self.verbosity, - stream=self.outstream, - exitfirst=self.exitfirst, - pdbmode=self.pdbmode, - cvg=self.cvg, - test_pattern=self.test_pattern, - skipped_patterns=self.skipped_patterns, - colorize=self.colorize, - batchmode=self.batchmode, - options=self.options) - - def removeSucceededTests(obj, succTests): - """ Recursive function that removes succTests from - a TestSuite or TestCase - """ - if isinstance(obj, unittest.TestSuite): - removeSucceededTests(obj._tests, succTests) - if isinstance(obj, list): - for el in obj[:]: - if isinstance(el, unittest.TestSuite): - removeSucceededTests(el, succTests) - elif isinstance(el, 
unittest.TestCase): - descr = '.'.join((el.__class__.__module__, - el.__class__.__name__, - el._testMethodName)) - if descr in succTests: - obj.remove(el) - # take care, self.options may be None - if getattr(self.options, 'restart', False): - # retrieve succeeded tests from FILE_RESTART - try: - restartfile = open(FILE_RESTART, 'r') - try: - succeededtests = list(elem.rstrip('\n\r') for elem in - restartfile.readlines()) - removeSucceededTests(self.test, succeededtests) - finally: - restartfile.close() - except Exception as ex: - raise Exception("Error while reading succeeded tests into %s: %s" - % (osp.join(os.getcwd(), FILE_RESTART), ex)) - - result = self.testRunner.run(self.test) - # help garbage collection: we want TestSuite, which hold refs to every - # executed TestCase, to be gc'ed - del self.test - if getattr(result, "debuggers", None) and \ - getattr(self, "pdbmode", None): - start_interactive_mode(result) - if not getattr(self, "batchmode", None): - sys.exit(not result.wasSuccessful()) - self.result = result - - -class SkipAwareTextTestRunner(unittest.TextTestRunner): - - def __init__(self, stream=sys.stderr, verbosity=1, - exitfirst=False, pdbmode=False, cvg=None, test_pattern=None, - skipped_patterns=(), colorize=False, batchmode=False, - options=None): - super(SkipAwareTextTestRunner, self).__init__(stream=stream, - verbosity=verbosity) - self.exitfirst = exitfirst - self.pdbmode = pdbmode - self.cvg = cvg - self.test_pattern = test_pattern - self.skipped_patterns = skipped_patterns - self.colorize = colorize - self.batchmode = batchmode - self.options = options - - def _this_is_skipped(self, testedname): - return any([(pat in testedname) for pat in self.skipped_patterns]) - - def _runcondition(self, test, skipgenerator=True): - if isinstance(test, testlib.InnerTest): - testname = test.name - else: - if isinstance(test, testlib.TestCase): - meth = test._get_test_method() - testname = '%s.%s' % (test.__name__, meth.__name__) - elif isinstance(test, types.FunctionType): - func = test - testname = func.__name__ - elif isinstance(test, types.MethodType): - cls = test.__self__.__class__ - testname = '%s.%s' % (cls.__name__, test.__name__) - else: - return True # Not sure when this happens - if isgeneratorfunction(test) and skipgenerator: - return self.does_match_tags(test) # Let inner tests decide at run time - if self._this_is_skipped(testname): - return False # this was explicitly skipped - if self.test_pattern is not None: - try: - classpattern, testpattern = self.test_pattern.split('.') - klass, name = testname.split('.') - if classpattern not in klass or testpattern not in name: - return False - except ValueError: - if self.test_pattern not in testname: - return False - - return self.does_match_tags(test) - - def does_match_tags(self, test): - if self.options is not None: - tags_pattern = getattr(self.options, 'tags_pattern', None) - if tags_pattern is not None: - tags = getattr(test, 'tags', testlib.Tags()) - if tags.inherit and isinstance(test, types.MethodType): - tags = tags | getattr(test.__self__.__class__, 'tags', testlib.Tags()) - return tags.match(tags_pattern) - return True # no pattern - - def _makeResult(self): - return SkipAwareTestResult(self.stream, self.descriptions, - self.verbosity, self.exitfirst, - self.pdbmode, self.cvg, self.colorize) - - def run(self, test): - "Run the given test case or test suite." 
- result = self._makeResult() - startTime = time() - test(result, runcondition=self._runcondition, options=self.options) - stopTime = time() - timeTaken = stopTime - startTime - result.printErrors() - if not self.batchmode: - self.stream.writeln(result.separator2) - run = result.testsRun - self.stream.writeln("Ran %d test%s in %.3fs" % - (run, run != 1 and "s" or "", timeTaken)) - self.stream.writeln() - if not result.wasSuccessful(): - if self.colorize: - self.stream.write(textutils.colorize_ansi("FAILED", color='red')) - else: - self.stream.write("FAILED") - else: - if self.colorize: - self.stream.write(textutils.colorize_ansi("OK", color='green')) - else: - self.stream.write("OK") - failed, errored, skipped = map(len, (result.failures, - result.errors, - result.skipped)) - - det_results = [] - for name, value in (("failures", result.failures), - ("errors",result.errors), - ("skipped", result.skipped)): - if value: - det_results.append("%s=%i" % (name, len(value))) - if det_results: - self.stream.write(" (") - self.stream.write(', '.join(det_results)) - self.stream.write(")") - self.stream.writeln("") - return result - - -class SkipAwareTestResult(unittest._TextTestResult): - - def __init__(self, stream, descriptions, verbosity, - exitfirst=False, pdbmode=False, cvg=None, colorize=False): - super(SkipAwareTestResult, self).__init__(stream, - descriptions, verbosity) - self.skipped = [] - self.debuggers = [] - self.fail_descrs = [] - self.error_descrs = [] - self.exitfirst = exitfirst - self.pdbmode = pdbmode - self.cvg = cvg - self.colorize = colorize - self.pdbclass = Debugger - self.verbose = verbosity > 1 - - def descrs_for(self, flavour): - return getattr(self, '%s_descrs' % flavour.lower()) - - def _create_pdb(self, test_descr, flavour): - self.descrs_for(flavour).append( (len(self.debuggers), test_descr) ) - if self.pdbmode: - self.debuggers.append(self.pdbclass(sys.exc_info()[2])) - - def _iter_valid_frames(self, frames): - """only consider non-testlib frames when formatting traceback""" - lgc_testlib = osp.abspath(__file__) - std_testlib = osp.abspath(unittest.__file__) - invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib) - for frameinfo in dropwhile(invalid, frames): - yield frameinfo - - def _exc_info_to_string(self, err, test): - """Converts a sys.exc_info()-style tuple of values into a string. 
- - This method is overridden here because we want to colorize - lines if --color is passed, and display local variables if - --verbose is passed - """ - exctype, exc, tb = err - output = ['Traceback (most recent call last)'] - frames = inspect.getinnerframes(tb) - colorize = self.colorize - frames = enumerate(self._iter_valid_frames(frames)) - for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames: - filename = osp.abspath(filename) - if ctx is None: # pyc files or C extensions for instance - source = '' - else: - source = ''.join(ctx) - if colorize: - filename = textutils.colorize_ansi(filename, 'magenta') - source = colorize_source(source) - output.append(' File "%s", line %s, in %s' % (filename, lineno, funcname)) - output.append(' %s' % source.strip()) - if self.verbose: - output.append('%r == %r' % (dir(frame), test.__module__)) - output.append('') - output.append(' ' + ' local variables '.center(66, '-')) - for varname, value in sorted(frame.f_locals.items()): - output.append(' %s: %r' % (varname, value)) - if varname == 'self': # special handy processing for self - for varname, value in sorted(vars(value).items()): - output.append(' self.%s: %r' % (varname, value)) - output.append(' ' + '-' * 66) - output.append('') - output.append(''.join(traceback.format_exception_only(exctype, exc))) - return '\n'.join(output) - - def addError(self, test, err): - """err -> (exc_type, exc, tcbk)""" - exc_type, exc, _ = err - if isinstance(exc, testlib.SkipTest): - assert exc_type == SkipTest - self.addSkip(test, exc) - else: - if self.exitfirst: - self.shouldStop = True - descr = self.getDescription(test) - super(SkipAwareTestResult, self).addError(test, err) - self._create_pdb(descr, 'error') - - def addFailure(self, test, err): - if self.exitfirst: - self.shouldStop = True - descr = self.getDescription(test) - super(SkipAwareTestResult, self).addFailure(test, err) - self._create_pdb(descr, 'fail') - - def addSkip(self, test, reason): - self.skipped.append((test, reason)) - if self.showAll: - self.stream.writeln("SKIPPED") - elif self.dots: - self.stream.write('S') - - def printErrors(self): - super(SkipAwareTestResult, self).printErrors() - self.printSkippedList() - - def printSkippedList(self): - # format (test, err) compatible with unittest2 - for test, err in self.skipped: - descr = self.getDescription(test) - self.stream.writeln(self.separator1) - self.stream.writeln("%s: %s" % ('SKIPPED', descr)) - self.stream.writeln("\t%s" % err) - - def printErrorList(self, flavour, errors): - for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors): - self.stream.writeln(self.separator1) - self.stream.writeln("%s: %s" % (flavour, descr)) - self.stream.writeln(self.separator2) - self.stream.writeln(err) - self.stream.writeln('no stdout'.center(len(self.separator2))) - self.stream.writeln('no stderr'.center(len(self.separator2))) - - -from .decorators import monkeypatch -orig_call = testlib.TestCase.__call__ -@monkeypatch(testlib.TestCase, '__call__') -def call(self, result=None, runcondition=None, options=None): - orig_call(self, result=result, runcondition=runcondition, options=options) - if hasattr(options, "exitfirst") and options.exitfirst: - # add this test to restart file - try: - restartfile = open(FILE_RESTART, 'a') - try: - descr = '.'.join((self.__class__.__module__, - self.__class__.__name__, - self._testMethodName)) - restartfile.write(descr+os.linesep) - finally: - restartfile.close() - except Exception: - print("Error while saving succeeded test into", - 
osp.join(os.getcwd(), FILE_RESTART), - file=sys.__stderr__) - raise - - -@monkeypatch(testlib.TestCase) -def defaultTestResult(self): - """return a new instance of the defaultTestResult""" - return SkipAwareTestResult() - - -class NonStrictTestLoader(unittest.TestLoader): - """ - Overrides default testloader to be able to omit classname when - specifying tests to run on command line. - - For example, if the file test_foo.py contains :: - - class FooTC(TestCase): - def test_foo1(self): # ... - def test_foo2(self): # ... - def test_bar1(self): # ... - - class BarTC(TestCase): - def test_bar2(self): # ... - - 'python test_foo.py' will run the 3 tests in FooTC - 'python test_foo.py FooTC' will run the 3 tests in FooTC - 'python test_foo.py test_foo' will run test_foo1 and test_foo2 - 'python test_foo.py test_foo1' will run test_foo1 - 'python test_foo.py test_bar' will run FooTC.test_bar1 and BarTC.test_bar2 - """ - - def __init__(self): - self.skipped_patterns = () - - # some magic here to accept empty list by extending - # and to provide callable capability - def loadTestsFromNames(self, names, module=None): - suites = [] - for name in names: - suites.extend(self.loadTestsFromName(name, module)) - return self.suiteClass(suites) - - def _collect_tests(self, module): - tests = {} - for obj in vars(module).values(): - if isclass(obj) and issubclass(obj, unittest.TestCase): - classname = obj.__name__ - if classname[0] == '_' or self._this_is_skipped(classname): - continue - methodnames = [] - # obj is a TestCase class - for attrname in dir(obj): - if attrname.startswith(self.testMethodPrefix): - attr = getattr(obj, attrname) - if callable(attr): - methodnames.append(attrname) - # keep track of class (obj) for convenience - tests[classname] = (obj, methodnames) - return tests - - def loadTestsFromSuite(self, module, suitename): - try: - suite = getattr(module, suitename)() - except AttributeError: - return [] - assert hasattr(suite, '_tests'), \ - "%s.%s is not a valid TestSuite" % (module.__name__, suitename) - # python2.3 does not implement __iter__ on suites, we need to return - # _tests explicitly - return suite._tests - - def loadTestsFromName(self, name, module=None): - parts = name.split('.') - if module is None or len(parts) > 2: - # let the base class do its job here - return [super(NonStrictTestLoader, self).loadTestsFromName(name)] - tests = self._collect_tests(module) - collected = [] - if len(parts) == 1: - pattern = parts[0] - if callable(getattr(module, pattern, None) - ) and pattern not in tests: - # consider it as a suite - return self.loadTestsFromSuite(module, pattern) - if pattern in tests: - # case python unittest_foo.py MyTestTC - klass, methodnames = tests[pattern] - for methodname in methodnames: - collected = [klass(methodname) - for methodname in methodnames] - else: - # case python unittest_foo.py something - for klass, methodnames in tests.values(): - # skip methodname if matched by skipped_patterns - for skip_pattern in self.skipped_patterns: - methodnames = [methodname - for methodname in methodnames - if skip_pattern not in methodname] - collected += [klass(methodname) - for methodname in methodnames - if pattern in methodname] - elif len(parts) == 2: - # case "MyClass.test_1" - classname, pattern = parts - klass, methodnames = tests.get(classname, (None, [])) - for methodname in methodnames: - collected = [klass(methodname) for methodname in methodnames - if pattern in methodname] - return collected - - def _this_is_skipped(self, testedname): - return any([(pat in 
testedname) for pat in self.skipped_patterns]) - - def getTestCaseNames(self, testCaseClass): - """Return a sorted sequence of method names found within testCaseClass - """ - is_skipped = self._this_is_skipped - classname = testCaseClass.__name__ - if classname[0] == '_' or is_skipped(classname): - return [] - testnames = super(NonStrictTestLoader, self).getTestCaseNames( - testCaseClass) - return [testname for testname in testnames if not is_skipped(testname)] - - -# The 2 functions below are modified versions of the TestSuite.run method -# that is provided with unittest2 for python 2.6, in unittest2/suite.py -# It is used to monkeypatch the original implementation to support -# extra runcondition and options arguments (see in testlib.py) - -def _ts_run(self, result, runcondition=None, options=None): - self._wrapped_run(result, runcondition=runcondition, options=options) - self._tearDownPreviousClass(None, result) - self._handleModuleTearDown(result) - return result - -def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None): - for test in self: - if result.shouldStop: - break - if unittest_suite._isnotsuite(test): - self._tearDownPreviousClass(test, result) - self._handleModuleFixture(test, result) - self._handleClassSetUp(test, result) - result._previousTestClass = test.__class__ - if (getattr(test.__class__, '_classSetupFailed', False) or - getattr(result, '_moduleSetUpFailed', False)): - continue - - # --- modifications to deal with _wrapped_run --- - # original code is: - # - # if not debug: - # test(result) - # else: - # test.debug() - if hasattr(test, '_wrapped_run'): - try: - test._wrapped_run(result, debug, runcondition=runcondition, options=options) - except TypeError: - test._wrapped_run(result, debug) - elif not debug: - try: - test(result, runcondition, options) - except TypeError: - test(result) - else: - test.debug() - # --- end of modifications to deal with _wrapped_run --- - return result - -if sys.version_info >= (2, 7): - # The function below implements a modified version of the - # TestSuite.run method that is provided with python 2.7, in - # unittest/suite.py - def _ts_run(self, result, debug=False, runcondition=None, options=None): - topLevel = False - if getattr(result, '_testRunEntered', False) is False: - result._testRunEntered = topLevel = True - - self._wrapped_run(result, debug, runcondition, options) - - if topLevel: - self._tearDownPreviousClass(None, result) - self._handleModuleTearDown(result) - result._testRunEntered = False - return result - - -def enable_dbc(*args): - """ - Without arguments, return True if contracts can be enabled and should be - enabled (see option -d), return False otherwise. - - With arguments, return False if contracts can't or shouldn't be enabled, - otherwise weave ContractAspect with items passed as arguments. - """ - if not ENABLE_DBC: - return False - try: - from logilab.aspects.weaver import weaver - from logilab.aspects.lib.contracts import ContractAspect - except ImportError: - sys.stderr.write( - 'Warning: logilab.aspects is not available. Contracts disabled.') - return False - for arg in args: - weaver.weave_module(arg, ContractAspect) - return True - - -# monkeypatch unittest and doctest (ouch !) 
-unittest._TextTestResult = SkipAwareTestResult -unittest.TextTestRunner = SkipAwareTextTestRunner -unittest.TestLoader = NonStrictTestLoader -unittest.TestProgram = SkipAwareTestProgram - -if sys.version_info >= (2, 4): - doctest.DocTestCase.__bases__ = (testlib.TestCase,) - # XXX check python2.6 compatibility - #doctest.DocTestCase._cleanups = [] - #doctest.DocTestCase._out = [] -else: - unittest.FunctionTestCase.__bases__ = (testlib.TestCase,) -unittest.TestSuite.run = _ts_run -unittest.TestSuite._wrapped_run = _ts_wrapped_run - -if __name__ == '__main__': - run() - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/registry.py b/pymode/libs/logilab-common-1.4.1/logilab/common/registry.py deleted file mode 100644 index 07d43532..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/registry.py +++ /dev/null @@ -1,1156 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of Logilab-common. -# -# Logilab-common is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# Logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with Logilab-common. If not, see . -"""This module provides bases for predicates dispatching (the pattern in use -here is similar to what's refered as multi-dispatch or predicate-dispatch in the -literature, though a bit different since the idea is to select across different -implementation 'e.g. classes), not to dispatch a message to a function or -method. It contains the following classes: - -* :class:`RegistryStore`, the top level object which loads implementation - objects and stores them into registries. You'll usually use it to access - registries and their contained objects; - -* :class:`Registry`, the base class which contains objects semantically grouped - (for instance, sharing a same API, hence the 'implementation' name). You'll - use it to select the proper implementation according to a context. Notice you - may use registries on their own without using the store. - -.. Note:: - - implementation objects are usually designed to be accessed through the - registry and not by direct instantiation, besides to use it as base classe. - -The selection procedure is delegated to a selector, which is responsible for -scoring the object according to some context. At the end of the selection, if an -implementation has been found, an instance of this class is returned. A selector -is built from one or more predicates combined together using AND, OR, NOT -operators (actually `&`, `|` and `~`). You'll thus find some base classes to -build predicates: - -* :class:`Predicate`, the abstract base predicate class - -* :class:`AndPredicate`, :class:`OrPredicate`, :class:`NotPredicate`, which you - shouldn't have to use directly. You'll use `&`, `|` and '~' operators between - predicates directly - -* :func:`objectify_predicate` - -You'll eventually find one concrete predicate: :class:`yes` - -.. autoclass:: RegistryStore -.. autoclass:: Registry - -Predicates ----------- -.. 
autoclass:: Predicate -.. autofunction:: objectify_predicate -.. autoclass:: yes -.. autoclass:: AndPredicate -.. autoclass:: OrPredicate -.. autoclass:: NotPredicate - -Debugging ---------- -.. autoclass:: traced_selection - -Exceptions ----------- -.. autoclass:: RegistryException -.. autoclass:: RegistryNotFound -.. autoclass:: ObjectNotFound -.. autoclass:: NoSelectableObject -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import sys -import pkgutil -import types -import weakref -import traceback as tb -from os import listdir, stat -from os.path import join, isdir, exists -from logging import getLogger -from warnings import warn - -from six import string_types, add_metaclass - -from logilab.common.modutils import modpath_from_file -from logilab.common.logging_ext import set_log_methods -from logilab.common.decorators import classproperty -from logilab.common.deprecation import deprecated - - -class RegistryException(Exception): - """Base class for registry exception.""" - -class RegistryNotFound(RegistryException): - """Raised when an unknown registry is requested. - - This is usually a programming/typo error. - """ - -class ObjectNotFound(RegistryException): - """Raised when an unregistered object is requested. - - This may be a programming/typo or a misconfiguration error. - """ - -class NoSelectableObject(RegistryException): - """Raised when no object is selectable for a given context.""" - def __init__(self, args, kwargs, objects): - self.args = args - self.kwargs = kwargs - self.objects = objects - - def __str__(self): - return ('args: %s, kwargs: %s\ncandidates: %s' - % (self.args, self.kwargs.keys(), self.objects)) - -class SelectAmbiguity(RegistryException): - """Raised when several objects compete at selection time with an equal - score. - - """ - - -def _modname_from_path(path, extrapath=None): - modpath = modpath_from_file(path, extrapath) - # omit '__init__' from package's name to avoid loading that module - # once for each name when it is imported by some other object - # module. This supposes import in modules are done as:: - # - # from package import something - # - # not:: - # - # from package.__init__ import something - # - # which seems quite correct. - if modpath[-1] == '__init__': - modpath.pop() - return '.'.join(modpath) - - -def _toload_info(path, extrapath, _toload=None): - """Return a dictionary of : and an ordered list of - (file, module name) to load - """ - if _toload is None: - assert isinstance(path, list) - _toload = {}, [] - for fileordir in path: - if isdir(fileordir) and exists(join(fileordir, '__init__.py')): - subfiles = [join(fileordir, fname) for fname in listdir(fileordir)] - _toload_info(subfiles, extrapath, _toload) - elif fileordir[-3:] == '.py': - modname = _modname_from_path(fileordir, extrapath) - _toload[0][modname] = fileordir - _toload[1].append((fileordir, modname)) - return _toload - - -class RegistrableObject(object): - """This is the base class for registrable objects which are selected - according to a context. - - :attr:`__registry__` - name of the registry for this object (string like 'views', - 'templates'...). You may want to define `__registries__` directly if your - object should be registered in several registries. 
- - :attr:`__regid__` - object's identifier in the registry (string like 'main', - 'primary', 'folder_box') - - :attr:`__select__` - class'selector - - Moreover, the `__abstract__` attribute may be set to True to indicate that a - class is abstract and should not be registered. - - You don't have to inherit from this class to put it in a registry (having - `__regid__` and `__select__` is enough), though this is needed for classes - that should be automatically registered. - """ - - __registry__ = None - __regid__ = None - __select__ = None - __abstract__ = True # see doc snipppets below (in Registry class) - - @classproperty - def __registries__(cls): - if cls.__registry__ is None: - return () - return (cls.__registry__,) - - -class RegistrableInstance(RegistrableObject): - """Inherit this class if you want instances of the classes to be - automatically registered. - """ - - def __new__(cls, *args, **kwargs): - """Add a __module__ attribute telling the module where the instance was - created, for automatic registration. - """ - module = kwargs.pop('__module__', None) - obj = super(RegistrableInstance, cls).__new__(cls) - if module is None: - warn('instantiate {0} with ' - '__module__=__name__'.format(cls.__name__), - DeprecationWarning) - # XXX subclass must no override __new__ - filepath = tb.extract_stack(limit=2)[0][0] - obj.__module__ = _modname_from_path(filepath) - else: - obj.__module__ = module - return obj - - def __init__(self, __module__=None): - super(RegistrableInstance, self).__init__() - - -class Registry(dict): - """The registry store a set of implementations associated to identifier: - - * to each identifier are associated a list of implementations - - * to select an implementation of a given identifier, you should use one of the - :meth:`select` or :meth:`select_or_none` method - - * to select a list of implementations for a context, you should use the - :meth:`possible_objects` method - - * dictionary like access to an identifier will return the bare list of - implementations for this identifier. - - To be usable in a registry, the only requirement is to have a `__select__` - attribute. - - At the end of the registration process, the :meth:`__registered__` - method is called on each registered object which have them, given the - registry in which it's registered as argument. - - Registration methods: - - .. automethod:: register - .. automethod:: unregister - - Selection methods: - - .. automethod:: select - .. automethod:: select_or_none - .. automethod:: possible_objects - .. 
automethod:: object_by_id - """ - def __init__(self, debugmode): - super(Registry, self).__init__() - self.debugmode = debugmode - - def __getitem__(self, name): - """return the registry (list of implementation objects) associated to - this name - """ - try: - return super(Registry, self).__getitem__(name) - except KeyError: - exc = ObjectNotFound(name) - exc.__traceback__ = sys.exc_info()[-1] - raise exc - - @classmethod - def objid(cls, obj): - """returns a unique identifier for an object stored in the registry""" - return '%s.%s' % (obj.__module__, cls.objname(obj)) - - @classmethod - def objname(cls, obj): - """returns a readable name for an object stored in the registry""" - return getattr(obj, '__name__', id(obj)) - - def initialization_completed(self): - """call method __registered__() on registered objects when the callback - is defined""" - for objects in self.values(): - for objectcls in objects: - registered = getattr(objectcls, '__registered__', None) - if registered: - registered(self) - if self.debugmode: - wrap_predicates(_lltrace) - - def register(self, obj, oid=None, clear=False): - """base method to add an object in the registry""" - assert not '__abstract__' in obj.__dict__, obj - assert obj.__select__, obj - oid = oid or obj.__regid__ - assert oid, ('no explicit name supplied to register object %s, ' - 'which has no __regid__ set' % obj) - if clear: - objects = self[oid] = [] - else: - objects = self.setdefault(oid, []) - assert not obj in objects, 'object %s is already registered' % obj - objects.append(obj) - - def register_and_replace(self, obj, replaced): - """remove and register """ - # XXXFIXME this is a duplication of unregister() - # remove register_and_replace in favor of unregister + register - # or simplify by calling unregister then register here - if not isinstance(replaced, string_types): - replaced = self.objid(replaced) - # prevent from misspelling - assert obj is not replaced, 'replacing an object by itself: %s' % obj - registered_objs = self.get(obj.__regid__, ()) - for index, registered in enumerate(registered_objs): - if self.objid(registered) == replaced: - del registered_objs[index] - break - else: - self.warning('trying to replace %s that is not registered with %s', - replaced, obj) - self.register(obj) - - def unregister(self, obj): - """remove object from this registry""" - objid = self.objid(obj) - oid = obj.__regid__ - for registered in self.get(oid, ()): - # use self.objid() to compare objects because vreg will probably - # have its own version of the object, loaded through execfile - if self.objid(registered) == objid: - self[oid].remove(registered) - break - else: - self.warning('can\'t remove %s, no id %s in the registry', - objid, oid) - - def all_objects(self): - """return a list containing all objects in this registry. - """ - result = [] - for objs in self.values(): - result += objs - return result - - # dynamic selection methods ################################################ - - def object_by_id(self, oid, *args, **kwargs): - """return object with the `oid` identifier. Only one object is expected - to be found. - - raise :exc:`ObjectNotFound` if there are no object with id `oid` in this - registry - - raise :exc:`AssertionError` if there is more than one object there - """ - objects = self[oid] - assert len(objects) == 1, objects - return objects[0](*args, **kwargs) - - def select(self, __oid, *args, **kwargs): - """return the most specific object among those with the given oid - according to the given context. 
- - raise :exc:`ObjectNotFound` if there are no object with id `oid` in this - registry - - raise :exc:`NoSelectableObject` if no object can be selected - """ - obj = self._select_best(self[__oid], *args, **kwargs) - if obj is None: - raise NoSelectableObject(args, kwargs, self[__oid] ) - return obj - - def select_or_none(self, __oid, *args, **kwargs): - """return the most specific object among those with the given oid - according to the given context, or None if no object applies. - """ - try: - return self._select_best(self[__oid], *args, **kwargs) - except ObjectNotFound: - return None - - def possible_objects(self, *args, **kwargs): - """return an iterator on possible objects in this registry for the given - context - """ - for objects in self.values(): - obj = self._select_best(objects, *args, **kwargs) - if obj is None: - continue - yield obj - - def _select_best(self, objects, *args, **kwargs): - """return an instance of the most specific object according - to parameters - - return None if not object apply (don't raise `NoSelectableObject` since - it's costly when searching objects using `possible_objects` - (e.g. searching for hooks). - """ - score, winners = 0, None - for obj in objects: - objectscore = obj.__select__(obj, *args, **kwargs) - if objectscore > score: - score, winners = objectscore, [obj] - elif objectscore > 0 and objectscore == score: - winners.append(obj) - if winners is None: - return None - if len(winners) > 1: - # log in production environement / test, error while debugging - msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)' - if self.debugmode: - # raise bare exception in debug mode - raise SelectAmbiguity(msg % (winners, args, kwargs.keys())) - self.error(msg, winners, args, kwargs.keys()) - # return the result of calling the object - return self.selected(winners[0], args, kwargs) - - def selected(self, winner, args, kwargs): - """override here if for instance you don't want "instanciation" - """ - return winner(*args, **kwargs) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None - - -def obj_registries(cls, registryname=None): - """return a tuple of registry names (see __registries__)""" - if registryname: - return (registryname,) - return cls.__registries__ - - -class RegistryStore(dict): - """This class is responsible for loading objects and storing them - in their registry which is created on the fly as needed. - - It handles dynamic registration of objects and provides a - convenient api to access them. To be recognized as an object that - should be stored into one of the store's registry - (:class:`Registry`), an object must provide the following - attributes, used control how they interact with the registry: - - :attr:`__registries__` - list of registry names (string like 'views', 'templates'...) into which - the object should be registered - - :attr:`__regid__` - object identifier in the registry (string like 'main', - 'primary', 'folder_box') - - :attr:`__select__` - the object predicate selectors - - Moreover, the :attr:`__abstract__` attribute may be set to `True` - to indicate that an object is abstract and should not be registered - (such inherited attributes not considered). - - .. Note:: - - When using the store to load objects dynamically, you *always* have - to use **super()** to get the methods and attributes of the - superclasses, and not use the class identifier. 
If not, you'll get into - trouble at reload time. - - For example, instead of writing:: - - class Thing(Parent): - __regid__ = 'athing' - __select__ = yes() - - def f(self, arg1): - Parent.f(self, arg1) - - You must write:: - - class Thing(Parent): - __regid__ = 'athing' - __select__ = yes() - - def f(self, arg1): - super(Thing, self).f(arg1) - - Controlling object registration - ------------------------------- - - Dynamic loading is triggered by calling the :meth:`register_modnames` - method, given a list of modules names to inspect. - - .. automethod:: register_modnames - - For each module, by default, all compatible objects are registered - automatically. However if some objects come as replacement of - other objects, or have to be included only if some condition is - met, you'll have to define a `registration_callback(vreg)` - function in the module and explicitly register **all objects** in - this module, using the api defined below. - - - .. automethod:: RegistryStore.register_all - .. automethod:: RegistryStore.register_and_replace - .. automethod:: RegistryStore.register - .. automethod:: RegistryStore.unregister - - .. Note:: - Once the function `registration_callback(vreg)` is implemented in a - module, all the objects from this module have to be explicitly - registered as it disables the automatic object registration. - - - Examples: - - .. sourcecode:: python - - def registration_callback(store): - # register everything in the module except BabarClass - store.register_all(globals().values(), __name__, (BabarClass,)) - - # conditionally register BabarClass - if 'babar_relation' in store.schema: - store.register(BabarClass) - - In this example, we register all application object classes defined in the module - except `BabarClass`. This class is then registered only if the 'babar_relation' - relation type is defined in the instance schema. - - .. sourcecode:: python - - def registration_callback(store): - store.register(Elephant) - # replace Babar by Celeste - store.register_and_replace(Celeste, Babar) - - In this example, we explicitly register classes one by one: - - * the `Elephant` class - * the `Celeste` to replace `Babar` - - If at some point we register a new appobject class in this module, it won't be - registered at all without modification to the `registration_callback` - implementation. The first example will register it though, thanks to the call - to the `register_all` method. - - Controlling registry instantiation - ---------------------------------- - - The `REGISTRY_FACTORY` class dictionary allows to specify which class should - be instantiated for a given registry name. The class associated to `None` - key will be the class used when there is no specific class for a name. 
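    As a purely illustrative sketch (the `ViewRegistry` and `MyStore` names are
    hypothetical; `Registry` and `RegistryStore` are the classes defined in this
    module), a store can use a dedicated registry class for one registry name
    while every other name falls back to the entry keyed by `None`:

    .. sourcecode:: python

        class ViewRegistry(Registry):
            # hypothetical Registry subclass used only for the 'views' registry
            pass

        class MyStore(RegistryStore):
            # registry_class() looks the name up here, falling back to None
            REGISTRY_FACTORY = {None: Registry, 'views': ViewRegistry}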
- """ - - def __init__(self, debugmode=False): - super(RegistryStore, self).__init__() - self.debugmode = debugmode - - def reset(self): - """clear all registries managed by this store""" - # don't use self.clear, we want to keep existing subdictionaries - for subdict in self.values(): - subdict.clear() - self._lastmodifs = {} - - def __getitem__(self, name): - """return the registry (dictionary of class objects) associated to - this name - """ - try: - return super(RegistryStore, self).__getitem__(name) - except KeyError: - exc = RegistryNotFound(name) - exc.__traceback__ = sys.exc_info()[-1] - raise exc - - # methods for explicit (un)registration ################################### - - # default class, when no specific class set - REGISTRY_FACTORY = {None: Registry} - - def registry_class(self, regid): - """return existing registry named regid or use factory to create one and - return it""" - try: - return self.REGISTRY_FACTORY[regid] - except KeyError: - return self.REGISTRY_FACTORY[None] - - def setdefault(self, regid): - try: - return self[regid] - except RegistryNotFound: - self[regid] = self.registry_class(regid)(self.debugmode) - return self[regid] - - def register_all(self, objects, modname, butclasses=()): - """register registrable objects into `objects`. - - Registrable objects are properly configured subclasses of - :class:`RegistrableObject`. Objects which are not defined in the module - `modname` or which are in `butclasses` won't be registered. - - Typical usage is: - - .. sourcecode:: python - - store.register_all(globals().values(), __name__, (ClassIWantToRegisterExplicitly,)) - - So you get partially automatic registration, keeping manual registration - for some object (to use - :meth:`~logilab.common.registry.RegistryStore.register_and_replace` for - instance). - """ - assert isinstance(modname, string_types), \ - 'modname expected to be a module name (ie string), got %r' % modname - for obj in objects: - if self.is_registrable(obj) and obj.__module__ == modname and not obj in butclasses: - if isinstance(obj, type): - self._load_ancestors_then_object(modname, obj, butclasses) - else: - self.register(obj) - - def register(self, obj, registryname=None, oid=None, clear=False): - """register `obj` implementation into `registryname` or - `obj.__registries__` if not specified, with identifier `oid` or - `obj.__regid__` if not specified. - - If `clear` is true, all objects with the same identifier will be - previously unregistered. - """ - assert not obj.__dict__.get('__abstract__'), obj - for registryname in obj_registries(obj, registryname): - registry = self.setdefault(registryname) - registry.register(obj, oid=oid, clear=clear) - self.debug("register %s in %s['%s']", - registry.objname(obj), registryname, oid or obj.__regid__) - self._loadedmods.setdefault(obj.__module__, {})[registry.objid(obj)] = obj - - def unregister(self, obj, registryname=None): - """unregister `obj` object from the registry `registryname` or - `obj.__registries__` if not specified. - """ - for registryname in obj_registries(obj, registryname): - registry = self[registryname] - registry.unregister(obj) - self.debug("unregister %s from %s['%s']", - registry.objname(obj), registryname, obj.__regid__) - - def register_and_replace(self, obj, replaced, registryname=None): - """register `obj` object into `registryname` or - `obj.__registries__` if not specified. If found, the `replaced` object - will be unregistered first (else a warning will be issued as it is - generally unexpected). 
- """ - for registryname in obj_registries(obj, registryname): - registry = self[registryname] - registry.register_and_replace(obj, replaced) - self.debug("register %s in %s['%s'] instead of %s", - registry.objname(obj), registryname, obj.__regid__, - registry.objname(replaced)) - - # initialization methods ################################################### - - def init_registration(self, path, extrapath=None): - """reset registry and walk down path to return list of (path, name) - file modules to be loaded""" - # XXX make this private by renaming it to _init_registration ? - self.reset() - # compute list of all modules that have to be loaded - self._toloadmods, filemods = _toload_info(path, extrapath) - # XXX is _loadedmods still necessary ? It seems like it's useful - # to avoid loading same module twice, especially with the - # _load_ancestors_then_object logic but this needs to be checked - self._loadedmods = {} - return filemods - - @deprecated('use register_modnames() instead') - def register_objects(self, path, extrapath=None): - """register all objects found walking down """ - # load views from each directory in the instance's path - # XXX inline init_registration ? - filemods = self.init_registration(path, extrapath) - for filepath, modname in filemods: - self.load_file(filepath, modname) - self.initialization_completed() - - def register_modnames(self, modnames): - """register all objects found in """ - self.reset() - self._loadedmods = {} - self._toloadmods = {} - toload = [] - for modname in modnames: - filepath = pkgutil.find_loader(modname).get_filename() - if filepath[-4:] in ('.pyc', '.pyo'): - # The source file *must* exists - filepath = filepath[:-1] - self._toloadmods[modname] = filepath - toload.append((filepath, modname)) - for filepath, modname in toload: - self.load_file(filepath, modname) - self.initialization_completed() - - def initialization_completed(self): - """call initialization_completed() on all known registries""" - for reg in self.values(): - reg.initialization_completed() - - def _mdate(self, filepath): - """ return the modification date of a file path """ - try: - return stat(filepath)[-2] - except OSError: - # this typically happens on emacs backup files (.#foo.py) - self.warning('Unable to load %s. 
It is likely to be a backup file', - filepath) - return None - - def is_reload_needed(self, path): - """return True if something module changed and the registry should be - reloaded - """ - lastmodifs = self._lastmodifs - for fileordir in path: - if isdir(fileordir) and exists(join(fileordir, '__init__.py')): - if self.is_reload_needed([join(fileordir, fname) - for fname in listdir(fileordir)]): - return True - elif fileordir[-3:] == '.py': - mdate = self._mdate(fileordir) - if mdate is None: - continue # backup file, see _mdate implementation - elif "flymake" in fileordir: - # flymake + pylint in use, don't consider these they will corrupt the registry - continue - if fileordir not in lastmodifs or lastmodifs[fileordir] < mdate: - self.info('File %s changed since last visit', fileordir) - return True - return False - - def load_file(self, filepath, modname): - """ load registrable objects (if any) from a python file """ - if modname in self._loadedmods: - return - self._loadedmods[modname] = {} - mdate = self._mdate(filepath) - if mdate is None: - return # backup file, see _mdate implementation - elif "flymake" in filepath: - # flymake + pylint in use, don't consider these they will corrupt the registry - return - # set update time before module loading, else we get some reloading - # weirdness in case of syntax error or other error while importing the - # module - self._lastmodifs[filepath] = mdate - # load the module - if sys.version_info < (3,) and not isinstance(modname, str): - modname = str(modname) - module = __import__(modname, fromlist=modname.split('.')[:-1]) - self.load_module(module) - - def load_module(self, module): - """Automatically handle module objects registration. - - Instances are registered as soon as they are hashable and have the - following attributes: - - * __regid__ (a string) - * __select__ (a callable) - * __registries__ (a tuple/list of string) - - For classes this is a bit more complicated : - - - first ensure parent classes are already registered - - - class with __abstract__ == True in their local dictionary are skipped - - - object class needs to have registries and identifier properly set to a - non empty string to be registered. 
- """ - self.info('loading %s from %s', module.__name__, module.__file__) - if hasattr(module, 'registration_callback'): - module.registration_callback(self) - else: - self.register_all(vars(module).values(), module.__name__) - - def _load_ancestors_then_object(self, modname, objectcls, butclasses=()): - """handle class registration according to rules defined in - :meth:`load_module` - """ - # backward compat, we used to allow whatever else than classes - if not isinstance(objectcls, type): - if self.is_registrable(objectcls) and objectcls.__module__ == modname: - self.register(objectcls) - return - # imported classes - objmodname = objectcls.__module__ - if objmodname != modname: - # The module of the object is not the same as the currently - # worked on module, or this is actually an instance, which - # has no module at all - if objmodname in self._toloadmods: - # if this is still scheduled for loading, let's proceed immediately, - # but using the object module - self.load_file(self._toloadmods[objmodname], objmodname) - return - # ensure object hasn't been already processed - clsid = '%s.%s' % (modname, objectcls.__name__) - if clsid in self._loadedmods[modname]: - return - self._loadedmods[modname][clsid] = objectcls - # ensure ancestors are registered - for parent in objectcls.__bases__: - self._load_ancestors_then_object(modname, parent, butclasses) - # ensure object is registrable - if objectcls in butclasses or not self.is_registrable(objectcls): - return - # backward compat - reg = self.setdefault(obj_registries(objectcls)[0]) - if reg.objname(objectcls)[0] == '_': - warn("[lgc 0.59] object whose name start with '_' won't be " - "skipped anymore at some point, use __abstract__ = True " - "instead (%s)" % objectcls, DeprecationWarning) - return - # register, finally - self.register(objectcls) - - @classmethod - def is_registrable(cls, obj): - """ensure `obj` should be registered - - as arbitrary stuff may be registered, do a lot of check and warn about - weird cases (think to dumb proxy objects) - """ - if isinstance(obj, type): - if not issubclass(obj, RegistrableObject): - # ducktyping backward compat - if not (getattr(obj, '__registries__', None) - and getattr(obj, '__regid__', None) - and getattr(obj, '__select__', None)): - return False - elif issubclass(obj, RegistrableInstance): - return False - elif not isinstance(obj, RegistrableInstance): - return False - if not obj.__regid__: - return False # no regid - registries = obj.__registries__ - if not registries: - return False # no registries - selector = obj.__select__ - if not selector: - return False # no selector - if obj.__dict__.get('__abstract__', False): - return False - # then detect potential problems that should be warned - if not isinstance(registries, (tuple, list)): - cls.warning('%s has __registries__ which is not a list or tuple', obj) - return False - if not callable(selector): - cls.warning('%s has not callable __select__', obj) - return False - return True - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None - - -# init logging -set_log_methods(RegistryStore, getLogger('registry.store')) -set_log_methods(Registry, getLogger('registry')) - - -# helpers for debugging selectors -TRACED_OIDS = None - -def _trace_selector(cls, selector, args, ret): - vobj = args[0] - if TRACED_OIDS == 'all' or vobj.__regid__ in TRACED_OIDS: - print('%s -> %s for %s(%s)' % (cls, ret, vobj, 
vobj.__regid__)) - -def _lltrace(selector): - """use this decorator on your predicates so they become traceable with - :class:`traced_selection` - """ - def traced(cls, *args, **kwargs): - ret = selector(cls, *args, **kwargs) - if TRACED_OIDS is not None: - _trace_selector(cls, selector, args, ret) - return ret - traced.__name__ = selector.__name__ - traced.__doc__ = selector.__doc__ - return traced - -class traced_selection(object): # pylint: disable=C0103 - """ - Typical usage is : - - .. sourcecode:: python - - >>> from logilab.common.registry import traced_selection - >>> with traced_selection(): - ... # some code in which you want to debug selectors - ... # for all objects - - This will yield lines like this in the logs:: - - selector one_line_rset returned 0 for - - You can also give to :class:`traced_selection` the identifiers of objects on - which you want to debug selection ('oid1' and 'oid2' in the example above). - - .. sourcecode:: python - - >>> with traced_selection( ('regid1', 'regid2') ): - ... # some code in which you want to debug selectors - ... # for objects with __regid__ 'regid1' and 'regid2' - - A potentially useful point to set up such a tracing function is - the `logilab.common.registry.Registry.select` method body. - """ - - def __init__(self, traced='all'): - self.traced = traced - - def __enter__(self): - global TRACED_OIDS - TRACED_OIDS = self.traced - - def __exit__(self, exctype, exc, traceback): - global TRACED_OIDS - TRACED_OIDS = None - return traceback is None - -# selector base classes and operations ######################################## - -def objectify_predicate(selector_func): - """Most of the time, a simple score function is enough to build a selector. - The :func:`objectify_predicate` decorator turn it into a proper selector - class:: - - @objectify_predicate - def one(cls, req, rset=None, **kwargs): - return 1 - - class MyView(View): - __select__ = View.__select__ & one() - - """ - return type(selector_func.__name__, (Predicate,), - {'__doc__': selector_func.__doc__, - '__call__': lambda self, *a, **kw: selector_func(*a, **kw)}) - - -_PREDICATES = {} - -def wrap_predicates(decorator): - for predicate in _PREDICATES.values(): - if not '_decorators' in predicate.__dict__: - predicate._decorators = set() - if decorator in predicate._decorators: - continue - predicate._decorators.add(decorator) - predicate.__call__ = decorator(predicate.__call__) - -class PredicateMetaClass(type): - def __new__(mcs, *args, **kwargs): - # use __new__ so subclasses doesn't have to call Predicate.__init__ - inst = type.__new__(mcs, *args, **kwargs) - proxy = weakref.proxy(inst, lambda p: _PREDICATES.pop(id(p))) - _PREDICATES[id(proxy)] = proxy - return inst - - -@add_metaclass(PredicateMetaClass) -class Predicate(object): - """base class for selector classes providing implementation - for operators ``&``, ``|`` and ``~`` - - This class is only here to give access to binary operators, the selector - logic itself should be implemented in the :meth:`__call__` method. Notice it - should usually accept any arbitrary arguments (the context), though that may - vary depending on your usage of the registry. - - a selector is called to help choosing the correct object for a - particular context by returning a score (`int`) telling how well - the implementation given as first argument fit to the given context. - - 0 score means that the class doesn't apply. 
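    As a hedged illustration (the `authenticated` predicate and `MyObject` class
    are made-up names for this sketch; `objectify_predicate`, `yes` and
    `RegistrableObject` are defined in this module), a plain score function can
    be turned into a predicate and combined with others through the operators:

    .. sourcecode:: python

        @objectify_predicate
        def authenticated(cls, req, **kwargs):
            # score 1 when the context object carries a user, 0 otherwise
            return 1 if getattr(req, 'user', None) else 0

        class MyObject(RegistrableObject):
            __registry__ = 'stuff'
            __regid__ = 'myobject'
            # selectable only in authenticated contexts; yes() adds a small
            # default score so more specific implementations still win
            __select__ = authenticated() & yes()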
- """ - - @property - def func_name(self): - # backward compatibility - return self.__class__.__name__ - - def search_selector(self, selector): - """search for the given selector, selector instance or tuple of - selectors in the selectors tree. Return None if not found. - """ - if self is selector: - return self - if (isinstance(selector, type) or isinstance(selector, tuple)) and \ - isinstance(self, selector): - return self - return None - - def __str__(self): - return self.__class__.__name__ - - def __and__(self, other): - return AndPredicate(self, other) - def __rand__(self, other): - return AndPredicate(other, self) - def __iand__(self, other): - return AndPredicate(self, other) - def __or__(self, other): - return OrPredicate(self, other) - def __ror__(self, other): - return OrPredicate(other, self) - def __ior__(self, other): - return OrPredicate(self, other) - - def __invert__(self): - return NotPredicate(self) - - # XXX (function | function) or (function & function) not managed yet - - def __call__(self, cls, *args, **kwargs): - return NotImplementedError("selector %s must implement its logic " - "in its __call__ method" % self.__class__) - - def __repr__(self): - return u'' % (self.__class__.__name__, id(self)) - - -class MultiPredicate(Predicate): - """base class for compound selector classes""" - - def __init__(self, *selectors): - self.selectors = self.merge_selectors(selectors) - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(str(s) for s in self.selectors)) - - @classmethod - def merge_selectors(cls, selectors): - """deal with selector instanciation when necessary and merge - multi-selectors if possible: - - AndPredicate(AndPredicate(sel1, sel2), AndPredicate(sel3, sel4)) - ==> AndPredicate(sel1, sel2, sel3, sel4) - """ - merged_selectors = [] - for selector in selectors: - # XXX do we really want magic-transformations below? - # if so, wanna warn about them? - if isinstance(selector, types.FunctionType): - selector = objectify_predicate(selector)() - if isinstance(selector, type) and issubclass(selector, Predicate): - selector = selector() - assert isinstance(selector, Predicate), selector - if isinstance(selector, cls): - merged_selectors += selector.selectors - else: - merged_selectors.append(selector) - return merged_selectors - - def search_selector(self, selector): - """search for the given selector or selector instance (or tuple of - selectors) in the selectors tree. Return None if not found - """ - for childselector in self.selectors: - if childselector is selector: - return childselector - found = childselector.search_selector(selector) - if found is not None: - return found - # if not found in children, maybe we are looking for self? 
- return super(MultiPredicate, self).search_selector(selector) - - -class AndPredicate(MultiPredicate): - """and-chained selectors""" - def __call__(self, cls, *args, **kwargs): - score = 0 - for selector in self.selectors: - partscore = selector(cls, *args, **kwargs) - if not partscore: - return 0 - score += partscore - return score - - -class OrPredicate(MultiPredicate): - """or-chained selectors""" - def __call__(self, cls, *args, **kwargs): - for selector in self.selectors: - partscore = selector(cls, *args, **kwargs) - if partscore: - return partscore - return 0 - -class NotPredicate(Predicate): - """negation selector""" - def __init__(self, selector): - self.selector = selector - - def __call__(self, cls, *args, **kwargs): - score = self.selector(cls, *args, **kwargs) - return int(not score) - - def __str__(self): - return 'NOT(%s)' % self.selector - - -class yes(Predicate): # pylint: disable=C0103 - """Return the score given as parameter, with a default score of 0.5 so any - other selector take precedence. - - Usually used for objects which can be selected whatever the context, or - also sometimes to add arbitrary points to a score. - - Take care, `yes(0)` could be named 'no'... - """ - def __init__(self, score=0.5): - self.score = score - - def __call__(self, *args, **kwargs): - return self.score - - -# deprecated stuff ############################################################# - -@deprecated('[lgc 0.59] use Registry.objid class method instead') -def classid(cls): - return '%s.%s' % (cls.__module__, cls.__name__) - -@deprecated('[lgc 0.59] use obj_registries function instead') -def class_registries(cls, registryname): - return obj_registries(cls, registryname) - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py deleted file mode 100644 index b9d5fa6d..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/shellutils.py +++ /dev/null @@ -1,406 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""shell/term utilities, useful to write some python scripts instead of shell -scripts. 
-""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -import os -import glob -import shutil -import stat -import sys -import tempfile -import time -import fnmatch -import errno -import string -import random -import subprocess -from os.path import exists, isdir, islink, basename, join - -from six import string_types -from six.moves import range, input as raw_input - -from logilab.common import STD_BLACKLIST, _handle_blacklist -from logilab.common.compat import str_to_bytes -from logilab.common.deprecation import deprecated - - -class tempdir(object): - - def __enter__(self): - self.path = tempfile.mkdtemp() - return self.path - - def __exit__(self, exctype, value, traceback): - # rmtree in all cases - shutil.rmtree(self.path) - return traceback is None - - -class pushd(object): - def __init__(self, directory): - self.directory = directory - - def __enter__(self): - self.cwd = os.getcwd() - os.chdir(self.directory) - return self.directory - - def __exit__(self, exctype, value, traceback): - os.chdir(self.cwd) - - -def chown(path, login=None, group=None): - """Same as `os.chown` function but accepting user login or group name as - argument. If login or group is omitted, it's left unchanged. - - Note: you must own the file to chown it (or be root). Otherwise OSError is raised. - """ - if login is None: - uid = -1 - else: - try: - uid = int(login) - except ValueError: - import pwd # Platforms: Unix - uid = pwd.getpwnam(login).pw_uid - if group is None: - gid = -1 - else: - try: - gid = int(group) - except ValueError: - import grp - gid = grp.getgrnam(group).gr_gid - os.chown(path, uid, gid) - -def mv(source, destination, _action=shutil.move): - """A shell-like mv, supporting wildcards. - """ - sources = glob.glob(source) - if len(sources) > 1: - assert isdir(destination) - for filename in sources: - _action(filename, join(destination, basename(filename))) - else: - try: - source = sources[0] - except IndexError: - raise OSError('No file matching %s' % source) - if isdir(destination) and exists(destination): - destination = join(destination, basename(source)) - try: - _action(source, destination) - except OSError as ex: - raise OSError('Unable to move %r to %r (%s)' % ( - source, destination, ex)) - -def rm(*files): - """A shell-like rm, supporting wildcards. - """ - for wfile in files: - for filename in glob.glob(wfile): - if islink(filename): - os.remove(filename) - elif isdir(filename): - shutil.rmtree(filename) - else: - os.remove(filename) - -def cp(source, destination): - """A shell-like cp, supporting wildcards. - """ - mv(source, destination, _action=shutil.copy) - -def find(directory, exts, exclude=False, blacklist=STD_BLACKLIST): - """Recursively find files ending with the given extensions from the directory. 
- - :type directory: str - :param directory: - directory where the search should start - - :type exts: basestring or list or tuple - :param exts: - extensions or lists or extensions to search - - :type exclude: boolean - :param exts: - if this argument is True, returning files NOT ending with the given - extensions - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` - - :rtype: list - :return: - the list of all matching files - """ - if isinstance(exts, string_types): - exts = (exts,) - if exclude: - def match(filename, exts): - for ext in exts: - if filename.endswith(ext): - return False - return True - else: - def match(filename, exts): - for ext in exts: - if filename.endswith(ext): - return True - return False - files = [] - for dirpath, dirnames, filenames in os.walk(directory): - _handle_blacklist(blacklist, dirnames, filenames) - # don't append files if the directory is blacklisted - dirname = basename(dirpath) - if dirname in blacklist: - continue - files.extend([join(dirpath, f) for f in filenames if match(f, exts)]) - return files - - -def globfind(directory, pattern, blacklist=STD_BLACKLIST): - """Recursively finds files matching glob `pattern` under `directory`. - - This is an alternative to `logilab.common.shellutils.find`. - - :type directory: str - :param directory: - directory where the search should start - - :type pattern: basestring - :param pattern: - the glob pattern (e.g *.py, foo*.py, etc.) - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` - - :rtype: iterator - :return: - iterator over the list of all matching files - """ - for curdir, dirnames, filenames in os.walk(directory): - _handle_blacklist(blacklist, dirnames, filenames) - for fname in fnmatch.filter(filenames, pattern): - yield join(curdir, fname) - -def unzip(archive, destdir): - import zipfile - if not exists(destdir): - os.mkdir(destdir) - zfobj = zipfile.ZipFile(archive) - for name in zfobj.namelist(): - if name.endswith('/'): - os.mkdir(join(destdir, name)) - else: - outfile = open(join(destdir, name), 'wb') - outfile.write(zfobj.read(name)) - outfile.close() - - -class Execute: - """This is a deadlock safe version of popen2 (no stdin), that returns - an object with errorlevel, out and err. - """ - - def __init__(self, command): - cmd = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - self.out, self.err = cmd.communicate() - self.status = os.WEXITSTATUS(cmd.returncode) - -Execute = deprecated('Use subprocess.Popen instead')(Execute) - - -class ProgressBar(object): - """A simple text progression bar.""" - - def __init__(self, nbops, size=20, stream=sys.stdout, title=''): - if title: - self._fstr = '\r%s [%%-%ss]' % (title, int(size)) - else: - self._fstr = '\r[%%-%ss]' % int(size) - self._stream = stream - self._total = nbops - self._size = size - self._current = 0 - self._progress = 0 - self._current_text = None - self._last_text_write_size = 0 - - def _get_text(self): - return self._current_text - - def _set_text(self, text=None): - if text != self._current_text: - self._current_text = text - self.refresh() - - def _del_text(self): - self.text = None - - text = property(_get_text, _set_text, _del_text) - - def update(self, offset=1, exact=False): - """Move FORWARD to new cursor position (cursor will never go backward). 
- - :offset: fraction of ``size`` - - :exact: - - - False: offset relative to current cursor position if True - - True: offset as an asbsolute position - - """ - if exact: - self._current = offset - else: - self._current += offset - - progress = int((float(self._current)/float(self._total))*self._size) - if progress > self._progress: - self._progress = progress - self.refresh() - - def refresh(self): - """Refresh the progression bar display.""" - self._stream.write(self._fstr % ('=' * min(self._progress, self._size)) ) - if self._last_text_write_size or self._current_text: - template = ' %%-%is' % (self._last_text_write_size) - text = self._current_text - if text is None: - text = '' - self._stream.write(template % text) - self._last_text_write_size = len(text.rstrip()) - self._stream.flush() - - def finish(self): - self._stream.write('\n') - self._stream.flush() - - -class DummyProgressBar(object): - __slots__ = ('text',) - - def refresh(self): - pass - def update(self): - pass - def finish(self): - pass - - -_MARKER = object() -class progress(object): - - def __init__(self, nbops=_MARKER, size=_MARKER, stream=_MARKER, title=_MARKER, enabled=True): - self.nbops = nbops - self.size = size - self.stream = stream - self.title = title - self.enabled = enabled - - def __enter__(self): - if self.enabled: - kwargs = {} - for attr in ('nbops', 'size', 'stream', 'title'): - value = getattr(self, attr) - if value is not _MARKER: - kwargs[attr] = value - self.pb = ProgressBar(**kwargs) - else: - self.pb = DummyProgressBar() - return self.pb - - def __exit__(self, exc_type, exc_val, exc_tb): - self.pb.finish() - -class RawInput(object): - - def __init__(self, input=None, printer=None): - self._input = input or raw_input - self._print = printer - - def ask(self, question, options, default): - assert default in options - choices = [] - for option in options: - if option == default: - label = option[0].upper() - else: - label = option[0].lower() - if len(option) > 1: - label += '(%s)' % option[1:].lower() - choices.append((option, label)) - prompt = "%s [%s]: " % (question, - '/'.join([opt[1] for opt in choices])) - tries = 3 - while tries > 0: - answer = self._input(prompt).strip().lower() - if not answer: - return default - possible = [option for option, label in choices - if option.lower().startswith(answer)] - if len(possible) == 1: - return possible[0] - elif len(possible) == 0: - msg = '%s is not an option.' % answer - else: - msg = ('%s is an ambiguous answer, do you mean %s ?' 
% ( - answer, ' or '.join(possible))) - if self._print: - self._print(msg) - else: - print(msg) - tries -= 1 - raise Exception('unable to get a sensible answer') - - def confirm(self, question, default_is_yes=True): - default = default_is_yes and 'y' or 'n' - answer = self.ask(question, ('y', 'n'), default) - return answer == 'y' - -ASK = RawInput() - - -def getlogin(): - """avoid using os.getlogin() because of strange tty / stdin problems - (man 3 getlogin) - Another solution would be to use $LOGNAME, $USER or $USERNAME - """ - if sys.platform != 'win32': - import pwd # Platforms: Unix - return pwd.getpwuid(os.getuid())[0] - else: - return os.environ['USERNAME'] - -def generate_password(length=8, vocab=string.ascii_letters + string.digits): - """dumb password generation function""" - pwd = '' - for i in range(length): - pwd += random.choice(vocab) - return pwd diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py deleted file mode 100644 index a24608ce..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/sphinx_ext.py +++ /dev/null @@ -1,87 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
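# Usage sketch (an assumption for illustration, not part of the original
# file): a Sphinx project would typically enable this module from its
# conf.py, e.g.
#
#     extensions = ['logilab.common.sphinx_ext']
#
# which makes the `autodocstring` directive registered by setup() below
# available alongside the stock autodoc directives.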
-from logilab.common.decorators import monkeypatch - -from sphinx.ext import autodoc - -class DocstringOnlyModuleDocumenter(autodoc.ModuleDocumenter): - objtype = 'docstring' - def format_signature(self): - pass - def add_directive_header(self, sig): - pass - def document_members(self, all_members=False): - pass - - def resolve_name(self, modname, parents, path, base): - if modname is not None: - return modname, parents + [base] - return (path or '') + base, [] - - -#autodoc.add_documenter(DocstringOnlyModuleDocumenter) - -def setup(app): - app.add_autodocumenter(DocstringOnlyModuleDocumenter) - - - -from sphinx.ext.autodoc import (ViewList, Options, AutodocReporter, nodes, - assemble_option_dict, nested_parse_with_titles) - -@monkeypatch(autodoc.AutoDirective) -def run(self): - self.filename_set = set() # a set of dependent filenames - self.reporter = self.state.document.reporter - self.env = self.state.document.settings.env - self.warnings = [] - self.result = ViewList() - - # find out what documenter to call - objtype = self.name[4:] - doc_class = self._registry[objtype] - # process the options with the selected documenter's option_spec - self.genopt = Options(assemble_option_dict( - self.options.items(), doc_class.option_spec)) - # generate the output - documenter = doc_class(self, self.arguments[0]) - documenter.generate(more_content=self.content) - if not self.result: - return self.warnings - - # record all filenames as dependencies -- this will at least - # partially make automatic invalidation possible - for fn in self.filename_set: - self.env.note_dependency(fn) - - # use a custom reporter that correctly assigns lines to source - # filename/description and lineno - old_reporter = self.state.memo.reporter - self.state.memo.reporter = AutodocReporter(self.result, - self.state.memo.reporter) - if self.name in ('automodule', 'autodocstring'): - node = nodes.section() - # necessary so that the child nodes get the right source/line set - node.document = self.state.document - nested_parse_with_titles(self.state, self.result, node) - else: - node = nodes.paragraph() - node.document = self.state.document - self.state.nested_parse(self.result, 0, node) - self.state.memo.reporter = old_reporter - return self.warnings + node.children diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py deleted file mode 100644 index ab6e8a18..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/sphinxutils.py +++ /dev/null @@ -1,122 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Sphinx utils - -ModuleGenerator: Generate a file that lists all the modules of a list of -packages in order to pull all the docstring. 
-This should not be used in a makefile to systematically generate sphinx -documentation! - -Typical usage: - ->>> from logilab.common.sphinxutils import ModuleGenerator ->>> mgen = ModuleGenerator('logilab common', '/home/adim/src/logilab/common') ->>> mgen.generate('api_logilab_common.rst', exclude_dirs=('test',)) -""" - -import os, sys -import os.path as osp -import inspect - -from logilab.common import STD_BLACKLIST -from logilab.common.shellutils import globfind -from logilab.common.modutils import load_module_from_file, modpath_from_file - -def module_members(module): - members = [] - for name, value in inspect.getmembers(module): - if getattr(value, '__module__', None) == module.__name__: - members.append( (name, value) ) - return sorted(members) - - -def class_members(klass): - return sorted([name for name in vars(klass) - if name not in ('__doc__', '__module__', - '__dict__', '__weakref__')]) - -class ModuleGenerator: - file_header = """.. -*- coding: utf-8 -*-\n\n%s\n""" - module_def = """ -:mod:`%s` -=======%s - -.. automodule:: %s - :members: %s -""" - class_def = """ - -.. autoclass:: %s - :members: %s - -""" - - def __init__(self, project_title, code_dir): - self.title = project_title - self.code_dir = osp.abspath(code_dir) - - def generate(self, dest_file, exclude_dirs=STD_BLACKLIST): - """make the module file""" - self.fn = open(dest_file, 'w') - num = len(self.title) + 6 - title = "=" * num + "\n %s API\n" % self.title + "=" * num - self.fn.write(self.file_header % title) - self.gen_modules(exclude_dirs=exclude_dirs) - self.fn.close() - - def gen_modules(self, exclude_dirs): - """generate all modules""" - for module in self.find_modules(exclude_dirs): - modname = module.__name__ - classes = [] - modmembers = [] - for objname, obj in module_members(module): - if inspect.isclass(obj): - classmembers = class_members(obj) - classes.append( (objname, classmembers) ) - else: - modmembers.append(objname) - self.fn.write(self.module_def % (modname, '=' * len(modname), - modname, - ', '.join(modmembers))) - for klass, members in classes: - self.fn.write(self.class_def % (klass, ', '.join(members))) - - def find_modules(self, exclude_dirs): - basepath = osp.dirname(self.code_dir) - basedir = osp.basename(basepath) + osp.sep - if basedir not in sys.path: - sys.path.insert(1, basedir) - for filepath in globfind(self.code_dir, '*.py', exclude_dirs): - if osp.basename(filepath) in ('setup.py', '__pkginfo__.py'): - continue - try: - module = load_module_from_file(filepath) - except: # module might be broken or magic - dotted_path = modpath_from_file(filepath) - module = type('.'.join(dotted_path), (), {}) # mock it - yield module - - -if __name__ == '__main__': - # example : - title, code_dir, outfile = sys.argv[1:] - generator = ModuleGenerator(title, code_dir) - # XXX modnames = ['logilab'] - generator.generate(outfile, ('test', 'tests', 'examples', - 'data', 'doc', '.hg', 'migration')) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/table.py b/pymode/libs/logilab-common-1.4.1/logilab/common/table.py deleted file mode 100644 index 2f3df694..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/table.py +++ /dev/null @@ -1,929 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Table management module.""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from six.moves import range - -class Table(object): - """Table defines a data table with column and row names. - inv: - len(self.data) <= len(self.row_names) - forall(self.data, lambda x: len(x) <= len(self.col_names)) - """ - - def __init__(self, default_value=0, col_names=None, row_names=None): - self.col_names = [] - self.row_names = [] - self.data = [] - self.default_value = default_value - if col_names: - self.create_columns(col_names) - if row_names: - self.create_rows(row_names) - - def _next_row_name(self): - return 'row%s' % (len(self.row_names)+1) - - def __iter__(self): - return iter(self.data) - - def __eq__(self, other): - if other is None: - return False - else: - return list(self) == list(other) - - __hash__ = object.__hash__ - - def __ne__(self, other): - return not self == other - - def __len__(self): - return len(self.row_names) - - ## Rows / Columns creation ################################################# - def create_rows(self, row_names): - """Appends row_names to the list of existing rows - """ - self.row_names.extend(row_names) - for row_name in row_names: - self.data.append([self.default_value]*len(self.col_names)) - - def create_columns(self, col_names): - """Appends col_names to the list of existing columns - """ - for col_name in col_names: - self.create_column(col_name) - - def create_row(self, row_name=None): - """Creates a rowname to the row_names list - """ - row_name = row_name or self._next_row_name() - self.row_names.append(row_name) - self.data.append([self.default_value]*len(self.col_names)) - - - def create_column(self, col_name): - """Creates a colname to the col_names list - """ - self.col_names.append(col_name) - for row in self.data: - row.append(self.default_value) - - ## Sort by column ########################################################## - def sort_by_column_id(self, col_id, method = 'asc'): - """Sorts the table (in-place) according to data stored in col_id - """ - try: - col_index = self.col_names.index(col_id) - self.sort_by_column_index(col_index, method) - except ValueError: - raise KeyError("Col (%s) not found in table" % (col_id)) - - - def sort_by_column_index(self, col_index, method = 'asc'): - """Sorts the table 'in-place' according to data stored in col_index - - method should be in ('asc', 'desc') - """ - sort_list = sorted([(row[col_index], row, row_name) - for row, row_name in zip(self.data, self.row_names)]) - # Sorting sort_list will sort according to col_index - # If we want reverse sort, then reverse list - if method.lower() == 'desc': - sort_list.reverse() - - # Rebuild data / row names - self.data = [] - self.row_names = [] - for val, row, row_name in sort_list: - self.data.append(row) - self.row_names.append(row_name) - - def groupby(self, colname, *others): - """builds 
indexes of data - :returns: nested dictionaries pointing to actual rows - """ - groups = {} - colnames = (colname,) + others - col_indexes = [self.col_names.index(col_id) for col_id in colnames] - for row in self.data: - ptr = groups - for col_index in col_indexes[:-1]: - ptr = ptr.setdefault(row[col_index], {}) - ptr = ptr.setdefault(row[col_indexes[-1]], - Table(default_value=self.default_value, - col_names=self.col_names)) - ptr.append_row(tuple(row)) - return groups - - def select(self, colname, value): - grouped = self.groupby(colname) - try: - return grouped[value] - except KeyError: - return [] - - def remove(self, colname, value): - col_index = self.col_names.index(colname) - for row in self.data[:]: - if row[col_index] == value: - self.data.remove(row) - - - ## The 'setter' part ####################################################### - def set_cell(self, row_index, col_index, data): - """sets value of cell 'row_indew', 'col_index' to data - """ - self.data[row_index][col_index] = data - - - def set_cell_by_ids(self, row_id, col_id, data): - """sets value of cell mapped by row_id and col_id to data - Raises a KeyError if row_id or col_id are not found in the table - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - else: - try: - col_index = self.col_names.index(col_id) - self.data[row_index][col_index] = data - except ValueError: - raise KeyError("Column (%s) not found in table" % (col_id)) - - - def set_row(self, row_index, row_data): - """sets the 'row_index' row - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - self.data[row_index] = row_data - - - def set_row_by_id(self, row_id, row_data): - """sets the 'row_id' column - pre: - type(row_data) == types.ListType - len(row_data) == len(self.row_names) - Raises a KeyError if row_id is not found - """ - try: - row_index = self.row_names.index(row_id) - self.set_row(row_index, row_data) - except ValueError: - raise KeyError('Row (%s) not found in table' % (row_id)) - - - def append_row(self, row_data, row_name=None): - """Appends a row to the table - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - row_name = row_name or self._next_row_name() - self.row_names.append(row_name) - self.data.append(row_data) - return len(self.data) - 1 - - def insert_row(self, index, row_data, row_name=None): - """Appends row_data before 'index' in the table. To make 'insert' - behave like 'list.insert', inserting in an out of range index will - insert row_data to the end of the list - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - row_name = row_name or self._next_row_name() - self.row_names.insert(index, row_name) - self.data.insert(index, row_data) - - - def delete_row(self, index): - """Deletes the 'index' row in the table, and returns it. - Raises an IndexError if index is out of range - """ - self.row_names.pop(index) - return self.data.pop(index) - - - def delete_row_by_id(self, row_id): - """Deletes the 'row_id' row in the table. - Raises a KeyError if row_id was not found. 
- """ - try: - row_index = self.row_names.index(row_id) - self.delete_row(row_index) - except ValueError: - raise KeyError('Row (%s) not found in table' % (row_id)) - - - def set_column(self, col_index, col_data): - """sets the 'col_index' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - - for row_index, cell_data in enumerate(col_data): - self.data[row_index][col_index] = cell_data - - - def set_column_by_id(self, col_id, col_data): - """sets the 'col_id' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.col_names) - Raises a KeyError if col_id is not found - """ - try: - col_index = self.col_names.index(col_id) - self.set_column(col_index, col_data) - except ValueError: - raise KeyError('Column (%s) not found in table' % (col_id)) - - - def append_column(self, col_data, col_name): - """Appends the 'col_index' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - self.col_names.append(col_name) - for row_index, cell_data in enumerate(col_data): - self.data[row_index].append(cell_data) - - - def insert_column(self, index, col_data, col_name): - """Appends col_data before 'index' in the table. To make 'insert' - behave like 'list.insert', inserting in an out of range index will - insert col_data to the end of the list - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - self.col_names.insert(index, col_name) - for row_index, cell_data in enumerate(col_data): - self.data[row_index].insert(index, cell_data) - - - def delete_column(self, index): - """Deletes the 'index' column in the table, and returns it. - Raises an IndexError if index is out of range - """ - self.col_names.pop(index) - return [row.pop(index) for row in self.data] - - - def delete_column_by_id(self, col_id): - """Deletes the 'col_id' col in the table. - Raises a KeyError if col_id was not found. 
- """ - try: - col_index = self.col_names.index(col_id) - self.delete_column(col_index) - except ValueError: - raise KeyError('Column (%s) not found in table' % (col_id)) - - - ## The 'getter' part ####################################################### - - def get_shape(self): - """Returns a tuple which represents the table's shape - """ - return len(self.row_names), len(self.col_names) - shape = property(get_shape) - - def __getitem__(self, indices): - """provided for convenience""" - rows, multirows = None, False - cols, multicols = None, False - if isinstance(indices, tuple): - rows = indices[0] - if len(indices) > 1: - cols = indices[1] - else: - rows = indices - # define row slice - if isinstance(rows, str): - try: - rows = self.row_names.index(rows) - except ValueError: - raise KeyError("Row (%s) not found in table" % (rows)) - if isinstance(rows, int): - rows = slice(rows, rows+1) - multirows = False - else: - rows = slice(None) - multirows = True - # define col slice - if isinstance(cols, str): - try: - cols = self.col_names.index(cols) - except ValueError: - raise KeyError("Column (%s) not found in table" % (cols)) - if isinstance(cols, int): - cols = slice(cols, cols+1) - multicols = False - else: - cols = slice(None) - multicols = True - # get sub-table - tab = Table() - tab.default_value = self.default_value - tab.create_rows(self.row_names[rows]) - tab.create_columns(self.col_names[cols]) - for idx, row in enumerate(self.data[rows]): - tab.set_row(idx, row[cols]) - if multirows : - if multicols: - return tab - else: - return [item[0] for item in tab.data] - else: - if multicols: - return tab.data[0] - else: - return tab.data[0][0] - - def get_cell_by_ids(self, row_id, col_id): - """Returns the element at [row_id][col_id] - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - else: - try: - col_index = self.col_names.index(col_id) - except ValueError: - raise KeyError("Column (%s) not found in table" % (col_id)) - return self.data[row_index][col_index] - - def get_row_by_id(self, row_id): - """Returns the 'row_id' row - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - return self.data[row_index] - - def get_column_by_id(self, col_id, distinct=False): - """Returns the 'col_id' col - """ - try: - col_index = self.col_names.index(col_id) - except ValueError: - raise KeyError("Column (%s) not found in table" % (col_id)) - return self.get_column(col_index, distinct) - - def get_columns(self): - """Returns all the columns in the table - """ - return [self[:, index] for index in range(len(self.col_names))] - - def get_column(self, col_index, distinct=False): - """get a column by index""" - col = [row[col_index] for row in self.data] - if distinct: - col = list(set(col)) - return col - - def apply_stylesheet(self, stylesheet): - """Applies the stylesheet to this table - """ - for instruction in stylesheet.instructions: - eval(instruction) - - - def transpose(self): - """Keeps the self object intact, and returns the transposed (rotated) - table. - """ - transposed = Table() - transposed.create_rows(self.col_names) - transposed.create_columns(self.row_names) - for col_index, column in enumerate(self.get_columns()): - transposed.set_row(col_index, column) - return transposed - - - def pprint(self): - """returns a string representing the table in a pretty - printed 'text' format. 
- """ - # The maximum row name (to know the start_index of the first col) - max_row_name = 0 - for row_name in self.row_names: - if len(row_name) > max_row_name: - max_row_name = len(row_name) - col_start = max_row_name + 5 - - lines = [] - # Build the 'first' line <=> the col_names one - # The first cell <=> an empty one - col_names_line = [' '*col_start] - for col_name in self.col_names: - col_names_line.append(col_name + ' '*5) - lines.append('|' + '|'.join(col_names_line) + '|') - max_line_length = len(lines[0]) - - # Build the table - for row_index, row in enumerate(self.data): - line = [] - # First, build the row_name's cell - row_name = self.row_names[row_index] - line.append(row_name + ' '*(col_start-len(row_name))) - - # Then, build all the table's cell for this line. - for col_index, cell in enumerate(row): - col_name_length = len(self.col_names[col_index]) + 5 - data = str(cell) - line.append(data + ' '*(col_name_length - len(data))) - lines.append('|' + '|'.join(line) + '|') - if len(lines[-1]) > max_line_length: - max_line_length = len(lines[-1]) - - # Wrap the table with '-' to make a frame - lines.insert(0, '-'*max_line_length) - lines.append('-'*max_line_length) - return '\n'.join(lines) - - - def __repr__(self): - return repr(self.data) - - def as_text(self): - data = [] - # We must convert cells into strings before joining them - for row in self.data: - data.append([str(cell) for cell in row]) - lines = ['\t'.join(row) for row in data] - return '\n'.join(lines) - - - -class TableStyle: - """Defines a table's style - """ - - def __init__(self, table): - - self._table = table - self.size = dict([(col_name, '1*') for col_name in table.col_names]) - # __row_column__ is a special key to define the first column which - # actually has no name (<=> left most column <=> row names column) - self.size['__row_column__'] = '1*' - self.alignment = dict([(col_name, 'right') - for col_name in table.col_names]) - self.alignment['__row_column__'] = 'right' - - # We shouldn't have to create an entry for - # the 1st col (the row_column one) - self.units = dict([(col_name, '') for col_name in table.col_names]) - self.units['__row_column__'] = '' - - # XXX FIXME : params order should be reversed for all set() methods - def set_size(self, value, col_id): - """sets the size of the specified col_id to value - """ - self.size[col_id] = value - - def set_size_by_index(self, value, col_index): - """Allows to set the size according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.size[col_id] = value - - - def set_alignment(self, value, col_id): - """sets the alignment of the specified col_id to value - """ - self.alignment[col_id] = value - - - def set_alignment_by_index(self, value, col_index): - """Allows to set the alignment according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.alignment[col_id] = value - - - def set_unit(self, value, col_id): - """sets the unit of the specified col_id to value - """ - self.units[col_id] = value - - - def set_unit_by_index(self, value, col_index): - """Allows to set the unit according to the column index rather than - using the column's id. 
- BE CAREFUL : the '0' column is the '__row_column__' one ! - (Note that in the 'unit' case, you shouldn't have to set a unit - for the 1st column (the __row__column__ one)) - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.units[col_id] = value - - - def get_size(self, col_id): - """Returns the size of the specified col_id - """ - return self.size[col_id] - - - def get_size_by_index(self, col_index): - """Allows to get the size according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.size[col_id] - - - def get_alignment(self, col_id): - """Returns the alignment of the specified col_id - """ - return self.alignment[col_id] - - - def get_alignment_by_index(self, col_index): - """Allors to get the alignment according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.alignment[col_id] - - - def get_unit(self, col_id): - """Returns the unit of the specified col_id - """ - return self.units[col_id] - - - def get_unit_by_index(self, col_index): - """Allors to get the unit according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.units[col_id] - - -import re -CELL_PROG = re.compile("([0-9]+)_([0-9]+)") - -class TableStyleSheet: - """A simple Table stylesheet - Rules are expressions where cells are defined by the row_index - and col_index separated by an underscore ('_'). - For example, suppose you want to say that the (2,5) cell must be - the sum of its two preceding cells in the row, you would create - the following rule : - 2_5 = 2_3 + 2_4 - You can also use all the math.* operations you want. For example: - 2_5 = sqrt(2_3**2 + 2_4**2) - """ - - def __init__(self, rules = None): - rules = rules or [] - self.rules = [] - self.instructions = [] - for rule in rules: - self.add_rule(rule) - - - def add_rule(self, rule): - """Adds a rule to the stylesheet rules - """ - try: - source_code = ['from math import *'] - source_code.append(CELL_PROG.sub(r'self.data[\1][\2]', rule)) - self.instructions.append(compile('\n'.join(source_code), - 'table.py', 'exec')) - self.rules.append(rule) - except SyntaxError: - print("Bad Stylesheet Rule : %s [skipped]" % rule) - - - def add_rowsum_rule(self, dest_cell, row_index, start_col, end_col): - """Creates and adds a rule to sum over the row at row_index from - start_col to end_col. - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_col >= 0 - end_col > start_col - """ - cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, - end_col + 1)] - rule = '%d_%d=' % dest_cell + '+'.join(cell_list) - self.add_rule(rule) - - - def add_rowavg_rule(self, dest_cell, row_index, start_col, end_col): - """Creates and adds a rule to make the row average (from start_col - to end_col) - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. 
- pre: - start_col >= 0 - end_col > start_col - """ - cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, - end_col + 1)] - num = (end_col - start_col + 1) - rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num - self.add_rule(rule) - - - def add_colsum_rule(self, dest_cell, col_index, start_row, end_row): - """Creates and adds a rule to sum over the col at col_index from - start_row to end_row. - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_row >= 0 - end_row > start_row - """ - cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, - end_row + 1)] - rule = '%d_%d=' % dest_cell + '+'.join(cell_list) - self.add_rule(rule) - - - def add_colavg_rule(self, dest_cell, col_index, start_row, end_row): - """Creates and adds a rule to make the col average (from start_row - to end_row) - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_row >= 0 - end_row > start_row - """ - cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, - end_row + 1)] - num = (end_row - start_row + 1) - rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num - self.add_rule(rule) - - - -class TableCellRenderer: - """Defines a simple text renderer - """ - - def __init__(self, **properties): - """keywords should be properties with an associated boolean as value. - For example : - renderer = TableCellRenderer(units = True, alignment = False) - An unspecified property will have a 'False' value by default. - Possible properties are : - alignment, unit - """ - self.properties = properties - - - def render_cell(self, cell_coord, table, table_style): - """Renders the cell at 'cell_coord' in the table, using table_style - """ - row_index, col_index = cell_coord - cell_value = table.data[row_index][col_index] - final_content = self._make_cell_content(cell_value, - table_style, col_index +1) - return self._render_cell_content(final_content, - table_style, col_index + 1) - - - def render_row_cell(self, row_name, table, table_style): - """Renders the cell for 'row_id' row - """ - cell_value = row_name - return self._render_cell_content(cell_value, table_style, 0) - - - def render_col_cell(self, col_name, table, table_style): - """Renders the cell for 'col_id' row - """ - cell_value = col_name - col_index = table.col_names.index(col_name) - return self._render_cell_content(cell_value, table_style, col_index +1) - - - - def _render_cell_content(self, content, table_style, col_index): - """Makes the appropriate rendering for this cell content. 
- Rendering properties will be searched using the - *table_style.get_xxx_by_index(col_index)' methods - - **This method should be overridden in the derived renderer classes.** - """ - return content - - - def _make_cell_content(self, cell_content, table_style, col_index): - """Makes the cell content (adds decoration data, like units for - example) - """ - final_content = cell_content - if 'skip_zero' in self.properties: - replacement_char = self.properties['skip_zero'] - else: - replacement_char = 0 - if replacement_char and final_content == 0: - return replacement_char - - try: - units_on = self.properties['units'] - if units_on: - final_content = self._add_unit( - cell_content, table_style, col_index) - except KeyError: - pass - - return final_content - - - def _add_unit(self, cell_content, table_style, col_index): - """Adds unit to the cell_content if needed - """ - unit = table_style.get_unit_by_index(col_index) - return str(cell_content) + " " + unit - - - -class DocbookRenderer(TableCellRenderer): - """Defines how to render a cell for a docboook table - """ - - def define_col_header(self, col_index, table_style): - """Computes the colspec element according to the style - """ - size = table_style.get_size_by_index(col_index) - return '\n' % \ - (col_index, size) - - - def _render_cell_content(self, cell_content, table_style, col_index): - """Makes the appropriate rendering for this cell content. - Rendering properties will be searched using the - table_style.get_xxx_by_index(col_index)' methods. - """ - try: - align_on = self.properties['alignment'] - alignment = table_style.get_alignment_by_index(col_index) - if align_on: - return "%s\n" % \ - (alignment, cell_content) - except KeyError: - # KeyError <=> Default alignment - return "%s\n" % cell_content - - -class TableWriter: - """A class to write tables - """ - - def __init__(self, stream, table, style, **properties): - self._stream = stream - self.style = style or TableStyle(table) - self._table = table - self.properties = properties - self.renderer = None - - - def set_style(self, style): - """sets the table's associated style - """ - self.style = style - - - def set_renderer(self, renderer): - """sets the way to render cell - """ - self.renderer = renderer - - - def update_properties(self, **properties): - """Updates writer's properties (for cell rendering) - """ - self.properties.update(properties) - - - def write_table(self, title = ""): - """Writes the table - """ - raise NotImplementedError("write_table must be implemented !") - - - -class DocbookTableWriter(TableWriter): - """Defines an implementation of TableWriter to write a table in Docbook - """ - - def _write_headers(self): - """Writes col headers - """ - # Define col_headers (colstpec elements) - for col_index in range(len(self._table.col_names)+1): - self._stream.write(self.renderer.define_col_header(col_index, - self.style)) - - self._stream.write("\n\n") - # XXX FIXME : write an empty entry <=> the first (__row_column) column - self._stream.write('\n') - for col_name in self._table.col_names: - self._stream.write(self.renderer.render_col_cell( - col_name, self._table, - self.style)) - - self._stream.write("\n\n") - - - def _write_body(self): - """Writes the table body - """ - self._stream.write('\n') - - for row_index, row in enumerate(self._table.data): - self._stream.write('\n') - row_name = self._table.row_names[row_index] - # Write the first entry (row_name) - self._stream.write(self.renderer.render_row_cell(row_name, - self._table, - self.style)) - - for 
col_index, cell in enumerate(row): - self._stream.write(self.renderer.render_cell( - (row_index, col_index), - self._table, self.style)) - - self._stream.write('\n') - - self._stream.write('\n') - - - def write_table(self, title = ""): - """Writes the table - """ - self._stream.write('\nCodestin Search App\n'%(title)) - self._stream.write( - '\n'% - (len(self._table.col_names)+1)) - self._write_headers() - self._write_body() - - self._stream.write('\n
\n') - - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py b/pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py deleted file mode 100644 index ed74cf5a..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/tasksqueue.py +++ /dev/null @@ -1,101 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Prioritized tasks queue""" - -__docformat__ = "restructuredtext en" - -from bisect import insort_left - -from six.moves import queue - -LOW = 0 -MEDIUM = 10 -HIGH = 100 - -PRIORITY = { - 'LOW': LOW, - 'MEDIUM': MEDIUM, - 'HIGH': HIGH, - } -REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.items()) - - - -class PrioritizedTasksQueue(queue.Queue): - - def _init(self, maxsize): - """Initialize the queue representation""" - self.maxsize = maxsize - # ordered list of task, from the lowest to the highest priority - self.queue = [] - - def _put(self, item): - """Put a new item in the queue""" - for i, task in enumerate(self.queue): - # equivalent task - if task == item: - # if new task has a higher priority, remove the one already - # queued so the new priority will be considered - if task < item: - item.merge(task) - del self.queue[i] - break - # else keep it so current order is kept - task.merge(item) - return - insort_left(self.queue, item) - - def _get(self): - """Get an item from the queue""" - return self.queue.pop() - - def __iter__(self): - return iter(self.queue) - - def remove(self, tid): - """remove a specific task from the queue""" - # XXX acquire lock - for i, task in enumerate(self): - if task.id == tid: - self.queue.pop(i) - return - raise ValueError('not task of id %s in queue' % tid) - -class Task(object): - def __init__(self, tid, priority=LOW): - # task id - self.id = tid - # task priority - self.priority = priority - - def __repr__(self): - return '' % (self.id, id(self)) - - def __cmp__(self, other): - return cmp(self.priority, other.priority) - - def __lt__(self, other): - return self.priority < other.priority - - def __eq__(self, other): - return self.id == other.id - - __hash__ = object.__hash__ - - def merge(self, other): - pass diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py b/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py deleted file mode 100644 index fa3e36ee..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/testlib.py +++ /dev/null @@ -1,708 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
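
The tasksqueue module deleted just above keeps its backlog sorted by priority inside a Queue subclass, so get() always hands back the highest-priority task, and re-queueing an equivalent task id merges it rather than duplicating it. A minimal behavioural sketch, assuming the classes exactly as removed (the module still pulled queue in through six.moves):

    # Sketch only, not part of the patch: ordering behaviour of the removed PrioritizedTasksQueue.
    q = PrioritizedTasksQueue()
    q.put(Task('cleanup', LOW))      # LOW    == 0
    q.put(Task('deploy', HIGH))      # HIGH   == 100
    q.put(Task('build', MEDIUM))     # MEDIUM == 10
    assert q.get().id == 'deploy'    # highest priority comes out first
    assert q.get().id == 'build'
    assert q.get().id == 'cleanup'
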
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Run tests. - -This will find all modules whose name match a given prefix in the test -directory, and run them. Various command line options provide -additional facilities. - -Command line options: - - -v verbose -- run tests in verbose mode with output to stdout - -q quiet -- don't print anything except if a test fails - -t testdir -- directory where the tests will be found - -x exclude -- add a test to exclude - -p profile -- profiled execution - -d dbc -- enable design-by-contract - -m match -- only run test matching the tag pattern which follow - -If no non-option arguments are present, prefixes used are 'test', -'regrtest', 'smoketest' and 'unittest'. - -""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" -# modified copy of some functions from test/regrtest.py from PyXml -# disable camel case warning -# pylint: disable=C0103 - -from contextlib import contextmanager -import sys -import os, os.path as osp -import re -import difflib -import tempfile -import math -import warnings -from shutil import rmtree -from operator import itemgetter -from inspect import isgeneratorfunction - -from six import PY2, add_metaclass, string_types -from six.moves import builtins, range, configparser, input - -from logilab.common.deprecation import class_deprecated, deprecated - -import unittest as unittest_legacy -if not getattr(unittest_legacy, "__package__", None): - try: - import unittest2 as unittest - from unittest2 import SkipTest - except ImportError: - raise ImportError("You have to install python-unittest2 to use %s" % __name__) -else: - import unittest as unittest - from unittest import SkipTest - -from functools import wraps - -from logilab.common.debugger import Debugger -from logilab.common.decorators import cached, classproperty -from logilab.common import textutils - - -__all__ = ['unittest_main', 'find_tests', 'nocoverage', 'pause_trace'] - -DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest', - 'func', 'validation') - -is_generator = deprecated('[lgc 0.63] use inspect.isgeneratorfunction')(isgeneratorfunction) - -# used by unittest to count the number of relevant levels in the traceback -__unittest = 1 - - -@deprecated('with_tempdir is deprecated, use {0}.TemporaryDirectory.'.format( - 'tempfile' if not PY2 else 'backports.tempfile')) -def with_tempdir(callable): - """A decorator ensuring no temporary file left when the function return - Work only for temporary file created with the tempfile module""" - if isgeneratorfunction(callable): - def proxy(*args, **kwargs): - old_tmpdir = tempfile.gettempdir() - new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-") - tempfile.tempdir = new_tmpdir - try: - for x in callable(*args, **kwargs): - yield x - finally: - try: - rmtree(new_tmpdir, ignore_errors=True) - finally: - tempfile.tempdir = old_tmpdir - return proxy - - @wraps(callable) - def proxy(*args, **kargs): - - 
old_tmpdir = tempfile.gettempdir() - new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-") - tempfile.tempdir = new_tmpdir - try: - return callable(*args, **kargs) - finally: - try: - rmtree(new_tmpdir, ignore_errors=True) - finally: - tempfile.tempdir = old_tmpdir - return proxy - -def in_tempdir(callable): - """A decorator moving the enclosed function inside the tempfile.tempfdir - """ - @wraps(callable) - def proxy(*args, **kargs): - - old_cwd = os.getcwd() - os.chdir(tempfile.tempdir) - try: - return callable(*args, **kargs) - finally: - os.chdir(old_cwd) - return proxy - -def within_tempdir(callable): - """A decorator run the enclosed function inside a tmpdir removed after execution - """ - proxy = with_tempdir(in_tempdir(callable)) - proxy.__name__ = callable.__name__ - return proxy - -def find_tests(testdir, - prefixes=DEFAULT_PREFIXES, suffix=".py", - excludes=(), - remove_suffix=True): - """ - Return a list of all applicable test modules. - """ - tests = [] - for name in os.listdir(testdir): - if not suffix or name.endswith(suffix): - for prefix in prefixes: - if name.startswith(prefix): - if remove_suffix and name.endswith(suffix): - name = name[:-len(suffix)] - if name not in excludes: - tests.append(name) - tests.sort() - return tests - - -## PostMortem Debug facilities ##### -def start_interactive_mode(result): - """starts an interactive shell so that the user can inspect errors - """ - debuggers = result.debuggers - descrs = result.error_descrs + result.fail_descrs - if len(debuggers) == 1: - # don't ask for test name if there's only one failure - debuggers[0].start() - else: - while True: - testindex = 0 - print("Choose a test to debug:") - # order debuggers in the same way than errors were printed - print("\n".join(['\t%s : %s' % (i, descr) for i, (_, descr) - in enumerate(descrs)])) - print("Type 'exit' (or ^D) to quit") - print() - try: - todebug = input('Enter a test name: ') - if todebug.strip().lower() == 'exit': - print() - break - else: - try: - testindex = int(todebug) - debugger = debuggers[descrs[testindex][0]] - except (ValueError, IndexError): - print("ERROR: invalid test number %r" % (todebug, )) - else: - debugger.start() - except (EOFError, KeyboardInterrupt): - print() - break - - -# coverage pausing tools ##################################################### - -@contextmanager -def replace_trace(trace=None): - """A context manager that temporary replaces the trace function""" - oldtrace = sys.gettrace() - sys.settrace(trace) - try: - yield - finally: - # specific hack to work around a bug in pycoverage, see - # https://bitbucket.org/ned/coveragepy/issue/123 - if (oldtrace is not None and not callable(oldtrace) and - hasattr(oldtrace, 'pytrace')): - oldtrace = oldtrace.pytrace - sys.settrace(oldtrace) - - -pause_trace = replace_trace - - -def nocoverage(func): - """Function decorator that pauses tracing functions""" - if hasattr(func, 'uncovered'): - return func - func.uncovered = True - - def not_covered(*args, **kwargs): - with pause_trace(): - return func(*args, **kwargs) - not_covered.uncovered = True - return not_covered - - -# test utils ################################################################## - - -# Add deprecation warnings about new api used by module level fixtures in unittest2 -# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule -class _DebugResult(object): # simplify import statement among unittest flavors.. - "Used by the TestSuite to hold previous class when running in debug." 
- _previousTestClass = None - _moduleSetUpFailed = False - shouldStop = False - -# backward compatibility: TestSuite might be imported from lgc.testlib -TestSuite = unittest.TestSuite - -class keywords(dict): - """Keyword args (**kwargs) support for generative tests.""" - -class starargs(tuple): - """Variable arguments (*args) for generative tests.""" - def __new__(cls, *args): - return tuple.__new__(cls, args) - -unittest_main = unittest.main - - -class InnerTestSkipped(SkipTest): - """raised when a test is skipped""" - pass - -def parse_generative_args(params): - args = [] - varargs = () - kwargs = {} - flags = 0 # 2 <=> starargs, 4 <=> kwargs - for param in params: - if isinstance(param, starargs): - varargs = param - if flags: - raise TypeError('found starargs after keywords !') - flags |= 2 - args += list(varargs) - elif isinstance(param, keywords): - kwargs = param - if flags & 4: - raise TypeError('got multiple keywords parameters') - flags |= 4 - elif flags & 2 or flags & 4: - raise TypeError('found parameters after kwargs or args') - else: - args.append(param) - - return args, kwargs - - -class InnerTest(tuple): - def __new__(cls, name, *data): - instance = tuple.__new__(cls, data) - instance.name = name - return instance - -class Tags(set): - """A set of tag able validate an expression""" - - def __init__(self, *tags, **kwargs): - self.inherit = kwargs.pop('inherit', True) - if kwargs: - raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys()) - - if len(tags) == 1 and not isinstance(tags[0], string_types): - tags = tags[0] - super(Tags, self).__init__(tags, **kwargs) - - def __getitem__(self, key): - return key in self - - def match(self, exp): - return eval(exp, {}, self) - - def __or__(self, other): - return Tags(*super(Tags, self).__or__(other)) - - -# duplicate definition from unittest2 of the _deprecate decorator -def _deprecate(original_func): - def deprecated_func(*args, **kwargs): - warnings.warn( - ('Please use %s instead.' % original_func.__name__), - DeprecationWarning, 2) - return original_func(*args, **kwargs) - return deprecated_func - -class TestCase(unittest.TestCase): - """A unittest.TestCase extension with some additional methods.""" - maxDiff = None - tags = Tags() - - def __init__(self, methodName='runTest'): - super(TestCase, self).__init__(methodName) - self.__exc_info = sys.exc_info - self.__testMethodName = self._testMethodName - self._current_test_descr = None - self._options_ = None - - @classproperty - @cached - def datadir(cls): # pylint: disable=E0213 - """helper attribute holding the standard test's data directory - - NOTE: this is a logilab's standard - """ - mod = sys.modules[cls.__module__] - return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data') - # cache it (use a class method to cache on class since TestCase is - # instantiated for each test run) - - @classmethod - def datapath(cls, *fname): - """joins the object's datadir and `fname`""" - return osp.join(cls.datadir, *fname) - - def set_description(self, descr): - """sets the current test's description. 
- This can be useful for generative tests because it allows to specify - a description per yield - """ - self._current_test_descr = descr - - # override default's unittest.py feature - def shortDescription(self): - """override default unittest shortDescription to handle correctly - generative tests - """ - if self._current_test_descr is not None: - return self._current_test_descr - return super(TestCase, self).shortDescription() - - def quiet_run(self, result, func, *args, **kwargs): - try: - func(*args, **kwargs) - except (KeyboardInterrupt, SystemExit): - raise - except unittest.SkipTest as e: - if hasattr(result, 'addSkip'): - result.addSkip(self, str(e)) - else: - warnings.warn("TestResult has no addSkip method, skips not reported", - RuntimeWarning, 2) - result.addSuccess(self) - return False - except: - result.addError(self, self.__exc_info()) - return False - return True - - def _get_test_method(self): - """return the test method""" - return getattr(self, self._testMethodName) - - def optval(self, option, default=None): - """return the option value or default if the option is not define""" - return getattr(self._options_, option, default) - - def __call__(self, result=None, runcondition=None, options=None): - """rewrite TestCase.__call__ to support generative tests - This is mostly a copy/paste from unittest.py (i.e same - variable names, same logic, except for the generative tests part) - """ - if result is None: - result = self.defaultTestResult() - self._options_ = options - # if result.cvg: - # result.cvg.start() - testMethod = self._get_test_method() - if (getattr(self.__class__, "__unittest_skip__", False) or - getattr(testMethod, "__unittest_skip__", False)): - # If the class or method was skipped. - try: - skip_why = (getattr(self.__class__, '__unittest_skip_why__', '') - or getattr(testMethod, '__unittest_skip_why__', '')) - if hasattr(result, 'addSkip'): - result.addSkip(self, skip_why) - else: - warnings.warn("TestResult has no addSkip method, skips not reported", - RuntimeWarning, 2) - result.addSuccess(self) - finally: - result.stopTest(self) - return - if runcondition and not runcondition(testMethod): - return # test is skipped - result.startTest(self) - try: - if not self.quiet_run(result, self.setUp): - return - generative = isgeneratorfunction(testMethod) - # generative tests - if generative: - self._proceed_generative(result, testMethod, - runcondition) - else: - status = self._proceed(result, testMethod) - success = (status == 0) - if not self.quiet_run(result, self.tearDown): - return - if not generative and success: - result.addSuccess(self) - finally: - # if result.cvg: - # result.cvg.stop() - result.stopTest(self) - - def _proceed_generative(self, result, testfunc, runcondition=None): - # cancel startTest()'s increment - result.testsRun -= 1 - success = True - try: - for params in testfunc(): - if runcondition and not runcondition(testfunc, - skipgenerator=False): - if not (isinstance(params, InnerTest) - and runcondition(params)): - continue - if not isinstance(params, (tuple, list)): - params = (params, ) - func = params[0] - args, kwargs = parse_generative_args(params[1:]) - # increment test counter manually - result.testsRun += 1 - status = self._proceed(result, func, args, kwargs) - if status == 0: - result.addSuccess(self) - success = True - else: - success = False - # XXX Don't stop anymore if an error occured - #if status == 2: - # result.shouldStop = True - if result.shouldStop: # either on error or on exitfirst + error - break - except 
self.failureException: - result.addFailure(self, self.__exc_info()) - success = False - except SkipTest as e: - result.addSkip(self, e) - except: - # if an error occurs between two yield - result.addError(self, self.__exc_info()) - success = False - return success - - def _proceed(self, result, testfunc, args=(), kwargs=None): - """proceed the actual test - returns 0 on success, 1 on failure, 2 on error - - Note: addSuccess can't be called here because we have to wait - for tearDown to be successfully executed to declare the test as - successful - """ - kwargs = kwargs or {} - try: - testfunc(*args, **kwargs) - except self.failureException: - result.addFailure(self, self.__exc_info()) - return 1 - except KeyboardInterrupt: - raise - except InnerTestSkipped as e: - result.addSkip(self, e) - return 1 - except SkipTest as e: - result.addSkip(self, e) - return 0 - except: - result.addError(self, self.__exc_info()) - return 2 - return 0 - - def innerSkip(self, msg=None): - """mark a generative test as skipped for the reason""" - msg = msg or 'test was skipped' - raise InnerTestSkipped(msg) - - if sys.version_info >= (3,2): - assertItemsEqual = unittest.TestCase.assertCountEqual - else: - assertCountEqual = unittest.TestCase.assertItemsEqual - -TestCase.assertItemsEqual = deprecated('assertItemsEqual is deprecated, use assertCountEqual')( - TestCase.assertItemsEqual) - -import doctest - -class SkippedSuite(unittest.TestSuite): - def test(self): - """just there to trigger test execution""" - self.skipped_test('doctest module has no DocTestSuite class') - - -class DocTestFinder(doctest.DocTestFinder): - - def __init__(self, *args, **kwargs): - self.skipped = kwargs.pop('skipped', ()) - doctest.DocTestFinder.__init__(self, *args, **kwargs) - - def _get_test(self, obj, name, module, globs, source_lines): - """override default _get_test method to be able to skip tests - according to skipped attribute's value - """ - if getattr(obj, '__name__', '') in self.skipped: - return None - return doctest.DocTestFinder._get_test(self, obj, name, module, - globs, source_lines) - - -@add_metaclass(class_deprecated) -class DocTest(TestCase): - """trigger module doctest - I don't know how to make unittest.main consider the DocTestSuite instance - without this hack - """ - __deprecation_warning__ = 'use stdlib doctest module with unittest API directly' - skipped = () - def __call__(self, result=None, runcondition=None, options=None):\ - # pylint: disable=W0613 - try: - finder = DocTestFinder(skipped=self.skipped) - suite = doctest.DocTestSuite(self.module, test_finder=finder) - # XXX iirk - doctest.DocTestCase._TestCase__exc_info = sys.exc_info - except AttributeError: - suite = SkippedSuite() - # doctest may gork the builtins dictionnary - # This happen to the "_" entry used by gettext - old_builtins = builtins.__dict__.copy() - try: - return suite.run(result) - finally: - builtins.__dict__.clear() - builtins.__dict__.update(old_builtins) - run = __call__ - - def test(self): - """just there to trigger test execution""" - - -class MockConnection: - """fake DB-API 2.0 connexion AND cursor (i.e. 
cursor() return self)""" - - def __init__(self, results): - self.received = [] - self.states = [] - self.results = results - - def cursor(self): - """Mock cursor method""" - return self - def execute(self, query, args=None): - """Mock execute method""" - self.received.append( (query, args) ) - def fetchone(self): - """Mock fetchone method""" - return self.results[0] - def fetchall(self): - """Mock fetchall method""" - return self.results - def commit(self): - """Mock commiy method""" - self.states.append( ('commit', len(self.received)) ) - def rollback(self): - """Mock rollback method""" - self.states.append( ('rollback', len(self.received)) ) - def close(self): - """Mock close method""" - pass - - -def mock_object(**params): - """creates an object using params to set attributes - >>> option = mock_object(verbose=False, index=range(5)) - >>> option.verbose - False - >>> option.index - [0, 1, 2, 3, 4] - """ - return type('Mock', (), params)() - - -def create_files(paths, chroot): - """Creates directories and files found in . - - :param paths: list of relative paths to files or directories - :param chroot: the root directory in which paths will be created - - >>> from os.path import isdir, isfile - >>> isdir('/tmp/a') - False - >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp') - >>> isdir('/tmp/a') - True - >>> isdir('/tmp/a/b/c') - True - >>> isfile('/tmp/a/b/c/d/e.py') - True - >>> isfile('/tmp/a/b/foo.py') - True - """ - dirs, files = set(), set() - for path in paths: - path = osp.join(chroot, path) - filename = osp.basename(path) - # path is a directory path - if filename == '': - dirs.add(path) - # path is a filename path - else: - dirs.add(osp.dirname(path)) - files.add(path) - for dirpath in dirs: - if not osp.isdir(dirpath): - os.makedirs(dirpath) - for filepath in files: - open(filepath, 'w').close() - - -class AttrObject: # XXX cf mock_object - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - -def tag(*args, **kwargs): - """descriptor adding tag to a function""" - def desc(func): - assert not hasattr(func, 'tags') - func.tags = Tags(*args, **kwargs) - return func - return desc - -def require_version(version): - """ Compare version of python interpreter to the given one. Skip the test - if older. - """ - def check_require_version(f): - version_elements = version.split('.') - try: - compare = tuple([int(v) for v in version_elements]) - except ValueError: - raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version) - current = sys.version_info[:3] - if current < compare: - def new_f(self, *args, **kwargs): - self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current]))) - new_f.__name__ = f.__name__ - return new_f - else: - return f - return check_require_version - -def require_module(module): - """ Check if the given module is loaded. Skip the test if not. - """ - def check_require_module(f): - try: - __import__(module) - return f - except ImportError: - def new_f(self, *args, **kwargs): - self.skipTest('%s can not be imported.' % module) - new_f.__name__ = f.__name__ - return new_f - return check_require_module - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py deleted file mode 100644 index 356b1a89..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/textutils.py +++ /dev/null @@ -1,539 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Some text manipulation utility functions. - - -:group text formatting: normalize_text, normalize_paragraph, pretty_match,\ -unquote, colorize_ansi -:group text manipulation: searchall, splitstrip -:sort: text formatting, text manipulation - -:type ANSI_STYLES: dict(str) -:var ANSI_STYLES: dictionary mapping style identifier to ANSI terminal code - -:type ANSI_COLORS: dict(str) -:var ANSI_COLORS: dictionary mapping color identifier to ANSI terminal code - -:type ANSI_PREFIX: str -:var ANSI_PREFIX: - ANSI terminal code notifying the start of an ANSI escape sequence - -:type ANSI_END: str -:var ANSI_END: - ANSI terminal code notifying the end of an ANSI escape sequence - -:type ANSI_RESET: str -:var ANSI_RESET: - ANSI terminal code resetting format defined by a previous ANSI escape sequence -""" -__docformat__ = "restructuredtext en" - -import sys -import re -import os.path as osp -from warnings import warn -from unicodedata import normalize as _uninormalize -try: - from os import linesep -except ImportError: - linesep = '\n' # gae - -from logilab.common.deprecation import deprecated - -MANUAL_UNICODE_MAP = { - u'\xa1': u'!', # INVERTED EXCLAMATION MARK - u'\u0142': u'l', # LATIN SMALL LETTER L WITH STROKE - u'\u2044': u'/', # FRACTION SLASH - u'\xc6': u'AE', # LATIN CAPITAL LETTER AE - u'\xa9': u'(c)', # COPYRIGHT SIGN - u'\xab': u'"', # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK - u'\xe6': u'ae', # LATIN SMALL LETTER AE - u'\xae': u'(r)', # REGISTERED SIGN - u'\u0153': u'oe', # LATIN SMALL LIGATURE OE - u'\u0152': u'OE', # LATIN CAPITAL LIGATURE OE - u'\xd8': u'O', # LATIN CAPITAL LETTER O WITH STROKE - u'\xf8': u'o', # LATIN SMALL LETTER O WITH STROKE - u'\xbb': u'"', # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK - u'\xdf': u'ss', # LATIN SMALL LETTER SHARP S - u'\u2013': u'-', # HYPHEN - u'\u2019': u"'", # SIMPLE QUOTE - } - -def unormalize(ustring, ignorenonascii=None, substitute=None): - """replace diacritical characters with their corresponding ascii characters - - Convert the unicode string to its long normalized form (unicode character - will be transform into several characters) and keep the first one only. - The normal form KD (NFKD) will apply the compatibility decomposition, i.e. - replace all compatibility characters with their equivalents. 
- - :type substitute: str - :param substitute: replacement character to use if decomposition fails - - :see: Another project about ASCII transliterations of Unicode text - http://pypi.python.org/pypi/Unidecode - """ - # backward compatibility, ignorenonascii was a boolean - if ignorenonascii is not None: - warn("ignorenonascii is deprecated, use substitute named parameter instead", - DeprecationWarning, stacklevel=2) - if ignorenonascii: - substitute = '' - res = [] - for letter in ustring[:]: - try: - replacement = MANUAL_UNICODE_MAP[letter] - except KeyError: - replacement = _uninormalize('NFKD', letter)[0] - if ord(replacement) >= 2 ** 7: - if substitute is None: - raise ValueError("can't deal with non-ascii based characters") - replacement = substitute - res.append(replacement) - return u''.join(res) - -def unquote(string): - """remove optional quotes (simple or double) from the string - - :type string: str or unicode - :param string: an optionally quoted string - - :rtype: str or unicode - :return: the unquoted string (or the input string if it wasn't quoted) - """ - if not string: - return string - if string[0] in '"\'': - string = string[1:] - if string[-1] in '"\'': - string = string[:-1] - return string - - -_BLANKLINES_RGX = re.compile('\r?\n\r?\n') -_NORM_SPACES_RGX = re.compile('\s+') - -def normalize_text(text, line_len=80, indent='', rest=False): - """normalize a text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized but blank - lines are kept. The indentation string may be used to insert a - comment (#) or a quoting (>) mark for instance. - - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - if rest: - normp = normalize_rest_paragraph - else: - normp = normalize_paragraph - result = [] - for text in _BLANKLINES_RGX.split(text): - result.append(normp(text, line_len, indent)) - return ('%s%s%s' % (linesep, indent, linesep)).join(result) - - -def normalize_paragraph(text, line_len=80, indent=''): - """normalize a text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized. The - indentation string may be used top insert a comment mark for - instance. - - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - text = _NORM_SPACES_RGX.sub(' ', text) - line_len = line_len - len(indent) - lines = [] - while text: - aline, text = splittext(text.strip(), line_len) - lines.append(indent + aline) - return linesep.join(lines) - -def normalize_rest_paragraph(text, line_len=80, indent=''): - """normalize a ReST text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized. The - indentation string may be used top insert a comment mark for - instance. 
- - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - toreport = '' - lines = [] - line_len = line_len - len(indent) - for line in text.splitlines(): - line = toreport + _NORM_SPACES_RGX.sub(' ', line.strip()) - toreport = '' - while len(line) > line_len: - # too long line, need split - line, toreport = splittext(line, line_len) - lines.append(indent + line) - if toreport: - line = toreport + ' ' - toreport = '' - else: - line = '' - if line: - lines.append(indent + line.strip()) - return linesep.join(lines) - - -def splittext(text, line_len): - """split the given text on space according to the given max line size - - return a 2-uple: - * a line <= line_len if possible - * the rest of the text which has to be reported on another line - """ - if len(text) <= line_len: - return text, '' - pos = min(len(text)-1, line_len) - while pos > 0 and text[pos] != ' ': - pos -= 1 - if pos == 0: - pos = min(len(text), line_len) - while len(text) > pos and text[pos] != ' ': - pos += 1 - return text[:pos], text[pos+1:].strip() - - -def splitstrip(string, sep=','): - """return a list of stripped string by splitting the string given as - argument on `sep` (',' by default). Empty string are discarded. - - >>> splitstrip('a, b, c , 4,,') - ['a', 'b', 'c', '4'] - >>> splitstrip('a') - ['a'] - >>> - - :type string: str or unicode - :param string: a csv line - - :type sep: str or unicode - :param sep: field separator, default to the comma (',') - - :rtype: str or unicode - :return: the unquoted string (or the input string if it wasn't quoted) - """ - return [word.strip() for word in string.split(sep) if word.strip()] - -get_csv = deprecated('get_csv is deprecated, use splitstrip')(splitstrip) - - -def split_url_or_path(url_or_path): - """return the latest component of a string containing either an url of the - form :// or a local file system path - """ - if '://' in url_or_path: - return url_or_path.rstrip('/').rsplit('/', 1) - return osp.split(url_or_path.rstrip(osp.sep)) - - -def text_to_dict(text): - """parse multilines text containing simple 'key=value' lines and return a - dict of {'key': 'value'}. When the same key is encountered multiple time, - value is turned into a list containing all values. - - >>> d = text_to_dict('''multiple=1 - ... multiple= 2 - ... single =3 - ... 
''') - >>> d['single'] - '3' - >>> d['multiple'] - ['1', '2'] - - """ - res = {} - if not text: - return res - for line in text.splitlines(): - line = line.strip() - if line and not line.startswith('#'): - key, value = [w.strip() for w in line.split('=', 1)] - if key in res: - try: - res[key].append(value) - except AttributeError: - res[key] = [res[key], value] - else: - res[key] = value - return res - - -_BLANK_URE = r'(\s|,)+' -_BLANK_RE = re.compile(_BLANK_URE) -__VALUE_URE = r'-?(([0-9]+\.[0-9]*)|((0x?)?[0-9]+))' -__UNITS_URE = r'[a-zA-Z]+' -_VALUE_RE = re.compile(r'(?P%s)(?P%s)?'%(__VALUE_URE, __UNITS_URE)) -_VALIDATION_RE = re.compile(r'^((%s)(%s))*(%s)?$' % (__VALUE_URE, __UNITS_URE, - __VALUE_URE)) - -BYTE_UNITS = { - "b": 1, - "kb": 1024, - "mb": 1024 ** 2, - "gb": 1024 ** 3, - "tb": 1024 ** 4, -} - -TIME_UNITS = { - "ms": 0.0001, - "s": 1, - "min": 60, - "h": 60 * 60, - "d": 60 * 60 *24, -} - -def apply_units(string, units, inter=None, final=float, blank_reg=_BLANK_RE, - value_reg=_VALUE_RE): - """Parse the string applying the units defined in units - (e.g.: "1.5m",{'m',60} -> 80). - - :type string: str or unicode - :param string: the string to parse - - :type units: dict (or any object with __getitem__ using basestring key) - :param units: a dict mapping a unit string repr to its value - - :type inter: type - :param inter: used to parse every intermediate value (need __sum__) - - :type blank_reg: regexp - :param blank_reg: should match every blank char to ignore. - - :type value_reg: regexp with "value" and optional "unit" group - :param value_reg: match a value and it's unit into the - """ - if inter is None: - inter = final - fstring = _BLANK_RE.sub('', string) - if not (fstring and _VALIDATION_RE.match(fstring)): - raise ValueError("Invalid unit string: %r." % string) - values = [] - for match in value_reg.finditer(fstring): - dic = match.groupdict() - lit, unit = dic["value"], dic.get("unit") - value = inter(lit) - if unit is not None: - try: - value *= units[unit.lower()] - except KeyError: - raise KeyError('invalid unit %s. 
valid units are %s' % - (unit, units.keys())) - values.append(value) - return final(sum(values)) - - -_LINE_RGX = re.compile('\r\n|\r+|\n') - -def pretty_match(match, string, underline_char='^'): - """return a string with the match location underlined: - - >>> import re - >>> print(pretty_match(re.search('mange', 'il mange du bacon'), 'il mange du bacon')) - il mange du bacon - ^^^^^ - >>> - - :type match: _sre.SRE_match - :param match: object returned by re.match, re.search or re.finditer - - :type string: str or unicode - :param string: - the string on which the regular expression has been applied to - obtain the `match` object - - :type underline_char: str or unicode - :param underline_char: - character to use to underline the matched section, default to the - carret '^' - - :rtype: str or unicode - :return: - the original string with an inserted line to underline the match - location - """ - start = match.start() - end = match.end() - string = _LINE_RGX.sub(linesep, string) - start_line_pos = string.rfind(linesep, 0, start) - if start_line_pos == -1: - start_line_pos = 0 - result = [] - else: - result = [string[:start_line_pos]] - start_line_pos += len(linesep) - offset = start - start_line_pos - underline = ' ' * offset + underline_char * (end - start) - end_line_pos = string.find(linesep, end) - if end_line_pos == -1: - string = string[start_line_pos:] - result.append(string) - result.append(underline) - else: - end = string[end_line_pos + len(linesep):] - string = string[start_line_pos:end_line_pos] - result.append(string) - result.append(underline) - result.append(end) - return linesep.join(result).rstrip() - - -# Ansi colorization ########################################################### - -ANSI_PREFIX = '\033[' -ANSI_END = 'm' -ANSI_RESET = '\033[0m' -ANSI_STYLES = { - 'reset': "0", - 'bold': "1", - 'italic': "3", - 'underline': "4", - 'blink': "5", - 'inverse': "7", - 'strike': "9", -} -ANSI_COLORS = { - 'reset': "0", - 'black': "30", - 'red': "31", - 'green': "32", - 'yellow': "33", - 'blue': "34", - 'magenta': "35", - 'cyan': "36", - 'white': "37", -} - -def _get_ansi_code(color=None, style=None): - """return ansi escape code corresponding to color and style - - :type color: str or None - :param color: - the color name (see `ANSI_COLORS` for available values) - or the color number when 256 colors are available - - :type style: str or None - :param style: - style string (see `ANSI_COLORS` for available values). To get - several style effects at the same time, use a coma as separator. - - :raise KeyError: if an unexistent color or style identifier is given - - :rtype: str - :return: the built escape code - """ - ansi_code = [] - if style: - style_attrs = splitstrip(style) - for effect in style_attrs: - ansi_code.append(ANSI_STYLES[effect]) - if color: - if color.isdigit(): - ansi_code.extend(['38', '5']) - ansi_code.append(color) - else: - ansi_code.append(ANSI_COLORS[color]) - if ansi_code: - return ANSI_PREFIX + ';'.join(ansi_code) + ANSI_END - return '' - -def colorize_ansi(msg, color=None, style=None): - """colorize message by wrapping it with ansi escape codes - - :type msg: str or unicode - :param msg: the message string to colorize - - :type color: str or None - :param color: - the color identifier (see `ANSI_COLORS` for available values) - - :type style: str or None - :param style: - style string (see `ANSI_COLORS` for available values). To get - several style effects at the same time, use a coma as separator. 
- - :raise KeyError: if an unexistent color or style identifier is given - - :rtype: str or unicode - :return: the ansi escaped string - """ - # If both color and style are not defined, then leave the text as is - if color is None and style is None: - return msg - escape_code = _get_ansi_code(color, style) - # If invalid (or unknown) color, don't wrap msg with ansi codes - if escape_code: - return '%s%s%s' % (escape_code, msg, ANSI_RESET) - return msg - -DIFF_STYLE = {'separator': 'cyan', 'remove': 'red', 'add': 'green'} - -def diff_colorize_ansi(lines, out=sys.stdout, style=DIFF_STYLE): - for line in lines: - if line[:4] in ('--- ', '+++ '): - out.write(colorize_ansi(line, style['separator'])) - elif line[0] == '-': - out.write(colorize_ansi(line, style['remove'])) - elif line[0] == '+': - out.write(colorize_ansi(line, style['add'])) - elif line[:4] == '--- ': - out.write(colorize_ansi(line, style['separator'])) - elif line[:4] == '+++ ': - out.write(colorize_ansi(line, style['separator'])) - else: - out.write(line) - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/tree.py b/pymode/libs/logilab-common-1.4.1/logilab/common/tree.py deleted file mode 100644 index 885eb0fa..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/tree.py +++ /dev/null @@ -1,369 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Base class to represent a tree structure. 
- - - - -""" -__docformat__ = "restructuredtext en" - -import sys - -from logilab.common import flatten -from logilab.common.visitor import VisitedMixIn, FilteredIterator, no_filter - -## Exceptions ################################################################# - -class NodeNotFound(Exception): - """raised when a node has not been found""" - -EX_SIBLING_NOT_FOUND = "No such sibling as '%s'" -EX_CHILD_NOT_FOUND = "No such child as '%s'" -EX_NODE_NOT_FOUND = "No such node as '%s'" - - -# Base node ################################################################### - -class Node(object): - """a basic tree node, characterized by an id""" - - def __init__(self, nid=None) : - self.id = nid - # navigation - self.parent = None - self.children = [] - - def __iter__(self): - return iter(self.children) - - def __str__(self, indent=0): - s = ['%s%s %s' % (' '*indent, self.__class__.__name__, self.id)] - indent += 2 - for child in self.children: - try: - s.append(child.__str__(indent)) - except TypeError: - s.append(child.__str__()) - return '\n'.join(s) - - def is_leaf(self): - return not self.children - - def append(self, child): - """add a node to children""" - self.children.append(child) - child.parent = self - - def remove(self, child): - """remove a child node""" - self.children.remove(child) - child.parent = None - - def insert(self, index, child): - """insert a child node""" - self.children.insert(index, child) - child.parent = self - - def replace(self, old_child, new_child): - """replace a child node with another""" - i = self.children.index(old_child) - self.children.pop(i) - self.children.insert(i, new_child) - new_child.parent = self - - def get_sibling(self, nid): - """return the sibling node that has given id""" - try: - return self.parent.get_child_by_id(nid) - except NodeNotFound : - raise NodeNotFound(EX_SIBLING_NOT_FOUND % nid) - - def next_sibling(self): - """ - return the next sibling for this node if any - """ - parent = self.parent - if parent is None: - # root node has no sibling - return None - index = parent.children.index(self) - try: - return parent.children[index+1] - except IndexError: - return None - - def previous_sibling(self): - """ - return the previous sibling for this node if any - """ - parent = self.parent - if parent is None: - # root node has no sibling - return None - index = parent.children.index(self) - if index > 0: - return parent.children[index-1] - return None - - def get_node_by_id(self, nid): - """ - return node in whole hierarchy that has given id - """ - root = self.root() - try: - return root.get_child_by_id(nid, 1) - except NodeNotFound : - raise NodeNotFound(EX_NODE_NOT_FOUND % nid) - - def get_child_by_id(self, nid, recurse=None): - """ - return child of given id - """ - if self.id == nid: - return self - for c in self.children : - if recurse: - try: - return c.get_child_by_id(nid, 1) - except NodeNotFound : - continue - if c.id == nid : - return c - raise NodeNotFound(EX_CHILD_NOT_FOUND % nid) - - def get_child_by_path(self, path): - """ - return child of given path (path is a list of ids) - """ - if len(path) > 0 and path[0] == self.id: - if len(path) == 1 : - return self - else : - for c in self.children : - try: - return c.get_child_by_path(path[1:]) - except NodeNotFound : - pass - raise NodeNotFound(EX_CHILD_NOT_FOUND % path) - - def depth(self): - """ - return depth of this node in the tree - """ - if self.parent is not None: - return 1 + self.parent.depth() - else : - return 0 - - def depth_down(self): - """ - return depth of the tree 
from this node - """ - if self.children: - return 1 + max([c.depth_down() for c in self.children]) - return 1 - - def width(self): - """ - return the width of the tree from this node - """ - return len(self.leaves()) - - def root(self): - """ - return the root node of the tree - """ - if self.parent is not None: - return self.parent.root() - return self - - def leaves(self): - """ - return a list with all the leaves nodes descendant from this node - """ - leaves = [] - if self.children: - for child in self.children: - leaves += child.leaves() - return leaves - else: - return [self] - - def flatten(self, _list=None): - """ - return a list with all the nodes descendant from this node - """ - if _list is None: - _list = [] - _list.append(self) - for c in self.children: - c.flatten(_list) - return _list - - def lineage(self): - """ - return list of parents up to root node - """ - lst = [self] - if self.parent is not None: - lst.extend(self.parent.lineage()) - return lst - -class VNode(Node, VisitedMixIn): - """a visitable node - """ - pass - - -class BinaryNode(VNode): - """a binary node (i.e. only two children - """ - def __init__(self, lhs=None, rhs=None) : - VNode.__init__(self) - if lhs is not None or rhs is not None: - assert lhs and rhs - self.append(lhs) - self.append(rhs) - - def remove(self, child): - """remove the child and replace this node with the other child - """ - self.children.remove(child) - self.parent.replace(self, self.children[0]) - - def get_parts(self): - """ - return the left hand side and the right hand side of this node - """ - return self.children[0], self.children[1] - - - -if sys.version_info[0:2] >= (2, 2): - list_class = list -else: - from UserList import UserList - list_class = UserList - -class ListNode(VNode, list_class): - """Used to manipulate Nodes as Lists - """ - def __init__(self): - list_class.__init__(self) - VNode.__init__(self) - self.children = self - - def __str__(self, indent=0): - return '%s%s %s' % (indent*' ', self.__class__.__name__, - ', '.join([str(v) for v in self])) - - def append(self, child): - """add a node to children""" - list_class.append(self, child) - child.parent = self - - def insert(self, index, child): - """add a node to children""" - list_class.insert(self, index, child) - child.parent = self - - def remove(self, child): - """add a node to children""" - list_class.remove(self, child) - child.parent = None - - def pop(self, index): - """add a node to children""" - child = list_class.pop(self, index) - child.parent = None - - def __iter__(self): - return list_class.__iter__(self) - -# construct list from tree #################################################### - -def post_order_list(node, filter_func=no_filter): - """ - create a list with tree nodes for which the function returned true - in a post order fashion - """ - l, stack = [], [] - poped, index = 0, 0 - while node: - if filter_func(node): - if node.children and not poped: - stack.append((node, index)) - index = 0 - node = node.children[0] - else: - l.append(node) - index += 1 - try: - node = stack[-1][0].children[index] - except IndexError: - node = None - else: - node = None - poped = 0 - if node is None and stack: - node, index = stack.pop() - poped = 1 - return l - -def pre_order_list(node, filter_func=no_filter): - """ - create a list with tree nodes for which the function returned true - in a pre order fashion - """ - l, stack = [], [] - poped, index = 0, 0 - while node: - if filter_func(node): - if not poped: - l.append(node) - if node.children and not poped: - 
stack.append((node, index)) - index = 0 - node = node.children[0] - else: - index += 1 - try: - node = stack[-1][0].children[index] - except IndexError: - node = None - else: - node = None - poped = 0 - if node is None and len(stack) > 1: - node, index = stack.pop() - poped = 1 - return l - -class PostfixedDepthFirstIterator(FilteredIterator): - """a postfixed depth first iterator, designed to be used with visitors - """ - def __init__(self, node, filter_func=None): - FilteredIterator.__init__(self, node, post_order_list, filter_func) - -class PrefixedDepthFirstIterator(FilteredIterator): - """a prefixed depth first iterator, designed to be used with visitors - """ - def __init__(self, node, filter_func=None): - FilteredIterator.__init__(self, node, pre_order_list, filter_func) - diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py b/pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py deleted file mode 100644 index a0394bc6..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/umessage.py +++ /dev/null @@ -1,177 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Unicode email support (extends email from stdlib)""" - -__docformat__ = "restructuredtext en" - -import email -from encodings import search_function -import sys -from email.utils import parseaddr, parsedate -from email.header import decode_header - -from datetime import datetime - -from six import text_type, binary_type - -try: - from mx.DateTime import DateTime -except ImportError: - DateTime = datetime - -import logilab.common as lgc - - -def decode_QP(string): - parts = [] - for decoded, charset in decode_header(string): - if not charset : - charset = 'iso-8859-15' - # python 3 sometimes returns str and sometimes bytes. - # the 'official' fix is to use the new 'policy' APIs - # https://bugs.python.org/issue24797 - # let's just handle this bug ourselves for now - if isinstance(decoded, binary_type): - decoded = decoded.decode(charset, 'replace') - assert isinstance(decoded, text_type) - parts.append(decoded) - - if sys.version_info < (3, 3): - # decoding was non-RFC compliant wrt to whitespace handling - # see http://bugs.python.org/issue1079 - return u' '.join(parts) - return u''.join(parts) - -def message_from_file(fd): - try: - return UMessage(email.message_from_file(fd)) - except email.errors.MessageParseError: - return '' - -def message_from_string(string): - try: - return UMessage(email.message_from_string(string)) - except email.errors.MessageParseError: - return '' - -class UMessage: - """Encapsulates an email.Message instance and returns only unicode objects. 
- """ - - def __init__(self, message): - self.message = message - - # email.Message interface ################################################# - - def get(self, header, default=None): - value = self.message.get(header, default) - if value: - return decode_QP(value) - return value - - def __getitem__(self, header): - return self.get(header) - - def get_all(self, header, default=()): - return [decode_QP(val) for val in self.message.get_all(header, default) - if val is not None] - - def is_multipart(self): - return self.message.is_multipart() - - def get_boundary(self): - return self.message.get_boundary() - - def walk(self): - for part in self.message.walk(): - yield UMessage(part) - - def get_payload(self, index=None, decode=False): - message = self.message - if index is None: - payload = message.get_payload(index, decode) - if isinstance(payload, list): - return [UMessage(msg) for msg in payload] - if message.get_content_maintype() != 'text': - return payload - if isinstance(payload, text_type): - return payload - - charset = message.get_content_charset() or 'iso-8859-1' - if search_function(charset) is None: - charset = 'iso-8859-1' - return text_type(payload or b'', charset, "replace") - else: - payload = UMessage(message.get_payload(index, decode)) - return payload - - def get_content_maintype(self): - return text_type(self.message.get_content_maintype()) - - def get_content_type(self): - return text_type(self.message.get_content_type()) - - def get_filename(self, failobj=None): - value = self.message.get_filename(failobj) - if value is failobj: - return value - try: - return text_type(value) - except UnicodeDecodeError: - return u'error decoding filename' - - # other convenience methods ############################################### - - def headers(self): - """return an unicode string containing all the message's headers""" - values = [] - for header in self.message.keys(): - values.append(u'%s: %s' % (header, self.get(header))) - return '\n'.join(values) - - def multi_addrs(self, header): - """return a list of 2-uple (name, address) for the given address (which - is expected to be an header containing address such as from, to, cc...) - """ - persons = [] - for person in self.get_all(header, ()): - name, mail = parseaddr(person) - persons.append((name, mail)) - return persons - - def date(self, alternative_source=False, return_str=False): - """return a datetime object for the email's date or None if no date is - set or if it can't be parsed - """ - value = self.get('date') - if value is None and alternative_source: - unix_from = self.message.get_unixfrom() - if unix_from is not None: - try: - value = unix_from.split(" ", 2)[2] - except IndexError: - pass - if value is not None: - datetuple = parsedate(value) - if datetuple: - if lgc.USE_MX_DATETIME: - return DateTime(*datetuple[:6]) - return datetime(*datetuple[:6]) - elif not return_str: - return None - return value diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py deleted file mode 100644 index d76ebe52..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/__init__.py +++ /dev/null @@ -1,172 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Universal report objects and some formatting drivers. - -A way to create simple reports using python objects, primarily designed to be -formatted as text and html. -""" -__docformat__ = "restructuredtext en" - -import sys - -from logilab.common.compat import StringIO -from logilab.common.textutils import linesep - - -def get_nodes(node, klass): - """return an iterator on all children node of the given klass""" - for child in node.children: - if isinstance(child, klass): - yield child - # recurse (FIXME: recursion controled by an option) - for grandchild in get_nodes(child, klass): - yield grandchild - -def layout_title(layout): - """try to return the layout's title as string, return None if not found - """ - for child in layout.children: - if isinstance(child, Title): - return u' '.join([node.data for node in get_nodes(child, Text)]) - -def build_summary(layout, level=1): - """make a summary for the report, including X level""" - assert level > 0 - level -= 1 - summary = List(klass=u'summary') - for child in layout.children: - if not isinstance(child, Section): - continue - label = layout_title(child) - if not label and not child.id: - continue - if not child.id: - child.id = label.replace(' ', '-') - node = Link(u'#'+child.id, label=label or child.id) - # FIXME: Three following lines produce not very compliant - # docbook: there are some useless . They might be - # replaced by the three commented lines but this then produces - # a bug in html display... 
- if level and [n for n in child.children if isinstance(n, Section)]: - node = Paragraph([node, build_summary(child, level)]) - summary.append(node) -# summary.append(node) -# if level and [n for n in child.children if isinstance(n, Section)]: -# summary.append(build_summary(child, level)) - return summary - - -class BaseWriter(object): - """base class for ureport writers""" - - def format(self, layout, stream=None, encoding=None): - """format and write the given layout into the stream object - - unicode policy: unicode strings may be found in the layout; - try to call stream.write with it, but give it back encoded using - the given encoding if it fails - """ - if stream is None: - stream = sys.stdout - if not encoding: - encoding = getattr(stream, 'encoding', 'UTF-8') - self.encoding = encoding or 'UTF-8' - self.__compute_funcs = [] - self.out = stream - self.begin_format(layout) - layout.accept(self) - self.end_format(layout) - - def format_children(self, layout): - """recurse on the layout children and call their accept method - (see the Visitor pattern) - """ - for child in getattr(layout, 'children', ()): - child.accept(self) - - def writeln(self, string=u''): - """write a line in the output buffer""" - self.write(string + linesep) - - def write(self, string): - """write a string in the output buffer""" - try: - self.out.write(string) - except UnicodeEncodeError: - self.out.write(string.encode(self.encoding)) - - def begin_format(self, layout): - """begin to format a layout""" - self.section = 0 - - def end_format(self, layout): - """finished to format a layout""" - - def get_table_content(self, table): - """trick to get table content without actually writing it - - return an aligned list of lists containing table cells values as string - """ - result = [[]] - cols = table.cols - for cell in self.compute_content(table): - if cols == 0: - result.append([]) - cols = table.cols - cols -= 1 - result[-1].append(cell) - # fill missing cells - while len(result[-1]) < cols: - result[-1].append(u'') - return result - - def compute_content(self, layout): - """trick to compute the formatting of children layout before actually - writing it - - return an iterator on strings (one for each child element) - """ - # use cells ! - def write(data): - try: - stream.write(data) - except UnicodeEncodeError: - stream.write(data.encode(self.encoding)) - def writeln(data=u''): - try: - stream.write(data+linesep) - except UnicodeEncodeError: - stream.write(data.encode(self.encoding)+linesep) - self.write = write - self.writeln = writeln - self.__compute_funcs.append((write, writeln)) - for child in layout.children: - stream = StringIO() - child.accept(self) - yield stream.getvalue() - self.__compute_funcs.pop() - try: - self.write, self.writeln = self.__compute_funcs[-1] - except IndexError: - del self.write - del self.writeln - - -from logilab.common.ureports.nodes import * -from logilab.common.ureports.text_writer import TextWriter -from logilab.common.ureports.html_writer import HTMLWriter diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py deleted file mode 100644 index 857068c8..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/docbook_writer.py +++ /dev/null @@ -1,140 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""HTML formatting drivers for ureports""" -__docformat__ = "restructuredtext en" - -from six.moves import range - -from logilab.common.ureports import HTMLWriter - -class DocbookWriter(HTMLWriter): - """format layouts as HTML""" - - def begin_format(self, layout): - """begin to format a layout""" - super(HTMLWriter, self).begin_format(layout) - if self.snippet is None: - self.writeln('') - self.writeln(""" - -""") - - def end_format(self, layout): - """finished to format a layout""" - if self.snippet is None: - self.writeln('') - - def visit_section(self, layout): - """display a section (using (level 0) or
)""" - if self.section == 0: - tag = "chapter" - else: - tag = "section" - self.section += 1 - self.writeln(self._indent('<%s%s>' % (tag, self.handle_attrs(layout)))) - self.format_children(layout) - self.writeln(self._indent(''% tag)) - self.section -= 1 - - def visit_title(self, layout): - """display a title using Codestin Search App') - - def visit_table(self, layout): - """display a table as html""" - self.writeln(self._indent(' Codestin Search App' \ - % (self.handle_attrs(layout), layout.title))) - self.writeln(self._indent(' '% layout.cols)) - for i in range(layout.cols): - self.writeln(self._indent(' ' % i)) - - table_content = self.get_table_content(layout) - # write headers - if layout.cheaders: - self.writeln(self._indent(' ')) - self._write_row(table_content[0]) - self.writeln(self._indent(' ')) - table_content = table_content[1:] - elif layout.rcheaders: - self.writeln(self._indent(' ')) - self._write_row(table_content[-1]) - self.writeln(self._indent(' ')) - table_content = table_content[:-1] - # write body - self.writeln(self._indent(' ')) - for i in range(len(table_content)): - row = table_content[i] - self.writeln(self._indent(' ')) - for j in range(len(row)): - cell = row[j] or ' ' - self.writeln(self._indent(' %s' % cell)) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - - def _write_row(self, row): - """write content of row (using )""" - self.writeln(' ') - for j in range(len(row)): - cell = row[j] or ' ' - self.writeln(' %s' % cell) - self.writeln(self._indent(' ')) - - def visit_list(self, layout): - """display a list (using )""" - self.writeln(self._indent(' ' % self.handle_attrs(layout))) - for row in list(self.compute_content(layout)): - self.writeln(' %s' % row) - self.writeln(self._indent(' ')) - - def visit_paragraph(self, layout): - """display links (using )""" - self.write(self._indent(' ')) - self.format_children(layout) - self.writeln('') - - def visit_span(self, layout): - """display links (using

)""" - #TODO: translate in docbook - self.write('' % self.handle_attrs(layout)) - self.format_children(layout) - self.write('') - - def visit_link(self, layout): - """display links (using )""" - self.write('%s' % (layout.url, - self.handle_attrs(layout), - layout.label)) - - def visit_verbatimtext(self, layout): - """display verbatim text (using )""" - self.writeln(self._indent(' ')) - self.write(layout.data.replace('&', '&').replace('<', '<')) - self.writeln(self._indent(' ')) - - def visit_text(self, layout): - """add some text""" - self.write(layout.data.replace('&', '&').replace('<', '<')) - - def _indent(self, string): - """correctly indent string according to section""" - return ' ' * 2*(self.section) + string diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py deleted file mode 100644 index eba34ea4..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/html_writer.py +++ /dev/null @@ -1,133 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""HTML formatting drivers for ureports"""
-__docformat__ = "restructuredtext en"
-
-from cgi import escape
-
-from six.moves import range
-
-from logilab.common.ureports import BaseWriter
-
-
-class HTMLWriter(BaseWriter):
-    """format layouts as HTML"""
-
-    def __init__(self, snippet=None):
-        super(HTMLWriter, self).__init__()
-        self.snippet = snippet
-
-    def handle_attrs(self, layout):
-        """get an attribute string from layout member attributes"""
-        attrs = u''
-        klass = getattr(layout, 'klass', None)
-        if klass:
-            attrs += u' class="%s"' % klass
-        nid = getattr(layout, 'id', None)
-        if nid:
-            attrs += u' id="%s"' % nid
-        return attrs
-
-    def begin_format(self, layout):
-        """begin to format a layout"""
-        super(HTMLWriter, self).begin_format(layout)
-        if self.snippet is None:
-            self.writeln(u'<html>')
-            self.writeln(u'<body>')
-
-    def end_format(self, layout):
-        """finished to format a layout"""
-        if self.snippet is None:
-            self.writeln(u'</body>')
-            self.writeln(u'</html>')
-
-
-    def visit_section(self, layout):
-        """display a section as html, using div + h[section level]"""
-        self.section += 1
-        self.writeln(u'<div%s>' % self.handle_attrs(layout))
-        self.format_children(layout)
-        self.writeln(u'</div>')
-        self.section -= 1
-
-    def visit_title(self, layout):
-        """display a title using <hX>"""
-        self.write(u'<h%s%s>' % (self.section, self.handle_attrs(layout)))
-        self.format_children(layout)
-        self.writeln(u'</h%s>' % self.section)
-
-    def visit_table(self, layout):
-        """display a table as html"""
-        self.writeln(u'<table%s>' % self.handle_attrs(layout))
-        table_content = self.get_table_content(layout)
-        for i in range(len(table_content)):
-            row = table_content[i]
-            if i == 0 and layout.rheaders:
-                self.writeln(u'<tr class="header">')
-            elif i+1 == len(table_content) and layout.rrheaders:
-                self.writeln(u'<tr class="header">')
-            else:
-                self.writeln(u'<tr class="%s">' % (i%2 and 'even' or 'odd'))
-            for j in range(len(row)):
-                cell = row[j] or u'&#160;'
-                if (layout.rheaders and i == 0) or \
-                   (layout.cheaders and j == 0) or \
-                   (layout.rrheaders and i+1 == len(table_content)) or \
-                   (layout.rcheaders and j+1 == len(row)):
-                    self.writeln(u'<th>%s</th>' % cell)
-                else:
-                    self.writeln(u'<td>%s</td>' % cell)
-            self.writeln(u'</tr>')
-        self.writeln(u'</table>')
-
-    def visit_list(self, layout):
-        """display a list as html"""
-        self.writeln(u'<ul%s>' % self.handle_attrs(layout))
-        for row in list(self.compute_content(layout)):
-            self.writeln(u'<li>%s</li>' % row)
-        self.writeln(u'</ul>')
-
-    def visit_paragraph(self, layout):
-        """display links (using <p>)"""
-        self.write(u'<p>')
-        self.format_children(layout)
-        self.write(u'</p>')
-
-    def visit_span(self, layout):
-        """display links (using <p>)"""
-        self.write(u'<span%s>' % self.handle_attrs(layout))
-        self.format_children(layout)
-        self.write(u'</span>')
-
-    def visit_link(self, layout):
-        """display links (using <a>)"""
-        self.write(u' <a href="%s"%s>%s</a>' % (layout.url,
-                                                self.handle_attrs(layout),
-                                                layout.label))
-    def visit_verbatimtext(self, layout):
-        """display verbatim text (using <pre>)"""
-        self.write(u'<pre>')
-        self.write(layout.data.replace(u'&', u'&amp;').replace(u'<', u'&lt;'))
-        self.write(u'</pre>')
-
-    def visit_text(self, layout):
-        """add some text"""
-        data = layout.data
-        if layout.escaped:
-            data = data.replace(u'&', u'&amp;').replace(u'<', u'&lt;')
-        self.write(data)
diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py
deleted file mode 100644
index a9585b30..00000000
--- a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/nodes.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of logilab-common.
-#
-# logilab-common is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option) any
-# later version.
-#
-# logilab-common is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
-"""Micro reports objects.
-
-A micro report is a tree of layout and content objects.
-"""
-__docformat__ = "restructuredtext en"
-
-from logilab.common.tree import VNode
-
-from six import string_types
-
-class BaseComponent(VNode):
-    """base report component
-
-    attributes
-    * id : the component's optional id
-    * klass : the component's optional klass
-    """
-    def __init__(self, id=None, klass=None):
-        VNode.__init__(self, id)
-        self.klass = klass
-
-class BaseLayout(BaseComponent):
-    """base container node
-
-    attributes
-    * BaseComponent attributes
-    * children : components in this table (i.e.
the table's cells) - """ - def __init__(self, children=(), **kwargs): - super(BaseLayout, self).__init__(**kwargs) - for child in children: - if isinstance(child, BaseComponent): - self.append(child) - else: - self.add_text(child) - - def append(self, child): - """overridden to detect problems easily""" - assert child not in self.parents() - VNode.append(self, child) - - def parents(self): - """return the ancestor nodes""" - assert self.parent is not self - if self.parent is None: - return [] - return [self.parent] + self.parent.parents() - - def add_text(self, text): - """shortcut to add text data""" - self.children.append(Text(text)) - - -# non container nodes ######################################################### - -class Text(BaseComponent): - """a text portion - - attributes : - * BaseComponent attributes - * data : the text value as an encoded or unicode string - """ - def __init__(self, data, escaped=True, **kwargs): - super(Text, self).__init__(**kwargs) - #if isinstance(data, unicode): - # data = data.encode('ascii') - assert isinstance(data, string_types), data.__class__ - self.escaped = escaped - self.data = data - -class VerbatimText(Text): - """a verbatim text, display the raw data - - attributes : - * BaseComponent attributes - * data : the text value as an encoded or unicode string - """ - -class Link(BaseComponent): - """a labelled link - - attributes : - * BaseComponent attributes - * url : the link's target (REQUIRED) - * label : the link's label as a string (use the url by default) - """ - def __init__(self, url, label=None, **kwargs): - super(Link, self).__init__(**kwargs) - assert url - self.url = url - self.label = label or url - - -class Image(BaseComponent): - """an embedded or a single image - - attributes : - * BaseComponent attributes - * filename : the image's filename (REQUIRED) - * stream : the stream object containing the image data (REQUIRED) - * title : the image's optional title - """ - def __init__(self, filename, stream, title=None, **kwargs): - super(Image, self).__init__(**kwargs) - assert filename - assert stream - self.filename = filename - self.stream = stream - self.title = title - - -# container nodes ############################################################# - -class Section(BaseLayout): - """a section - - attributes : - * BaseLayout attributes - - a title may also be given to the constructor, it'll be added - as a first element - a description may also be given to the constructor, it'll be added - as a first paragraph - """ - def __init__(self, title=None, description=None, **kwargs): - super(Section, self).__init__(**kwargs) - if description: - self.insert(0, Paragraph([Text(description)])) - if title: - self.insert(0, Title(children=(title,))) - -class Title(BaseLayout): - """a title - - attributes : - * BaseLayout attributes - - A title must not contains a section nor a paragraph! - """ - -class Span(BaseLayout): - """a title - - attributes : - * BaseLayout attributes - - A span should only contains Text and Link nodes (in-line elements) - """ - -class Paragraph(BaseLayout): - """a simple text paragraph - - attributes : - * BaseLayout attributes - - A paragraph must not contains a section ! 
- """ - -class Table(BaseLayout): - """some tabular data - - attributes : - * BaseLayout attributes - * cols : the number of columns of the table (REQUIRED) - * rheaders : the first row's elements are table's header - * cheaders : the first col's elements are table's header - * title : the table's optional title - """ - def __init__(self, cols, title=None, - rheaders=0, cheaders=0, rrheaders=0, rcheaders=0, - **kwargs): - super(Table, self).__init__(**kwargs) - assert isinstance(cols, int) - self.cols = cols - self.title = title - self.rheaders = rheaders - self.cheaders = cheaders - self.rrheaders = rrheaders - self.rcheaders = rcheaders - -class List(BaseLayout): - """some list data - - attributes : - * BaseLayout attributes - """ diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py b/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py deleted file mode 100644 index c87613c9..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/ureports/text_writer.py +++ /dev/null @@ -1,145 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Text formatting drivers for ureports""" - -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from six.moves import range - -from logilab.common.textutils import linesep -from logilab.common.ureports import BaseWriter - - -TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^'] -BULLETS = [u'*', u'-'] - -class TextWriter(BaseWriter): - """format layouts as text - (ReStructured inspiration but not totally handled yet) - """ - def begin_format(self, layout): - super(TextWriter, self).begin_format(layout) - self.list_level = 0 - self.pending_urls = [] - - def visit_section(self, layout): - """display a section as text - """ - self.section += 1 - self.writeln() - self.format_children(layout) - if self.pending_urls: - self.writeln() - for label, url in self.pending_urls: - self.writeln(u'.. _`%s`: %s' % (label, url)) - self.pending_urls = [] - self.section -= 1 - self.writeln() - - def visit_title(self, layout): - title = u''.join(list(self.compute_content(layout))) - self.writeln(title) - try: - self.writeln(TITLE_UNDERLINES[self.section] * len(title)) - except IndexError: - print("FIXME TITLE TOO DEEP. 
TURNING TITLE INTO TEXT") - - def visit_paragraph(self, layout): - """enter a paragraph""" - self.format_children(layout) - self.writeln() - - def visit_span(self, layout): - """enter a span""" - self.format_children(layout) - - def visit_table(self, layout): - """display a table as text""" - table_content = self.get_table_content(layout) - # get columns width - cols_width = [0]*len(table_content[0]) - for row in table_content: - for index in range(len(row)): - col = row[index] - cols_width[index] = max(cols_width[index], len(col)) - if layout.klass == 'field': - self.field_table(layout, table_content, cols_width) - else: - self.default_table(layout, table_content, cols_width) - self.writeln() - - def default_table(self, layout, table_content, cols_width): - """format a table""" - cols_width = [size+1 for size in cols_width] - format_strings = u' '.join([u'%%-%ss'] * len(cols_width)) - format_strings = format_strings % tuple(cols_width) - format_strings = format_strings.split(' ') - table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n' - headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n' - # FIXME: layout.cheaders - self.write(table_linesep) - for i in range(len(table_content)): - self.write(u'|') - line = table_content[i] - for j in range(len(line)): - self.write(format_strings[j] % line[j]) - self.write(u'|') - if i == 0 and layout.rheaders: - self.write(headsep) - else: - self.write(table_linesep) - - def field_table(self, layout, table_content, cols_width): - """special case for field table""" - assert layout.cols == 2 - format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0]) - for field, value in table_content: - self.write(format_string % (field, value)) - - - def visit_list(self, layout): - """display a list layout as text""" - bullet = BULLETS[self.list_level % len(BULLETS)] - indent = ' ' * self.list_level - self.list_level += 1 - for child in layout.children: - self.write(u'%s%s%s ' % (linesep, indent, bullet)) - child.accept(self) - self.list_level -= 1 - - def visit_link(self, layout): - """add a hyperlink""" - if layout.label != layout.url: - self.write(u'`%s`_' % layout.label) - self.pending_urls.append( (layout.label, layout.url) ) - else: - self.write(layout.url) - - def visit_verbatimtext(self, layout): - """display a verbatim layout as text (so difficult ;) - """ - self.writeln(u'::\n') - for line in layout.data.splitlines(): - self.writeln(u' ' + line) - self.writeln() - - def visit_text(self, layout): - """add some text""" - self.write(u'%s' % layout.data) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py b/pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py deleted file mode 100644 index 339aec06..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/urllib2ext.py +++ /dev/null @@ -1,89 +0,0 @@ -from __future__ import print_function - -import logging -import urllib2 - -import kerberos as krb - -class GssapiAuthError(Exception): - """raised on error during authentication process""" - -import re -RGX = re.compile('(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I) - -def get_negociate_value(headers): - for authreq in headers.getheaders('www-authenticate'): - match = RGX.search(authreq) - if match: - return match.group(1) - -class HTTPGssapiAuthHandler(urllib2.BaseHandler): - """Negotiate HTTP authentication using context from GSSAPI""" - - handler_order = 400 # before Digest Auth - - def __init__(self): - self._reset() - - def _reset(self): - self._retried = 0 - self._context = None - - def 
clean_context(self): - if self._context is not None: - krb.authGSSClientClean(self._context) - - def http_error_401(self, req, fp, code, msg, headers): - try: - if self._retried > 5: - raise urllib2.HTTPError(req.get_full_url(), 401, - "negotiate auth failed", headers, None) - self._retried += 1 - logging.debug('gssapi handler, try %s' % self._retried) - negotiate = get_negociate_value(headers) - if negotiate is None: - logging.debug('no negociate found in a www-authenticate header') - return None - logging.debug('HTTPGssapiAuthHandler: negotiate 1 is %r' % negotiate) - result, self._context = krb.authGSSClientInit("HTTP@%s" % req.get_host()) - if result < 1: - raise GssapiAuthError("HTTPGssapiAuthHandler: init failed with %d" % result) - result = krb.authGSSClientStep(self._context, negotiate) - if result < 0: - raise GssapiAuthError("HTTPGssapiAuthHandler: step 1 failed with %d" % result) - client_response = krb.authGSSClientResponse(self._context) - logging.debug('HTTPGssapiAuthHandler: client response is %s...' % client_response[:10]) - req.add_unredirected_header('Authorization', "Negotiate %s" % client_response) - server_response = self.parent.open(req) - negotiate = get_negociate_value(server_response.info()) - if negotiate is None: - logging.warning('HTTPGssapiAuthHandler: failed to authenticate server') - else: - logging.debug('HTTPGssapiAuthHandler negotiate 2: %s' % negotiate) - result = krb.authGSSClientStep(self._context, negotiate) - if result < 1: - raise GssapiAuthError("HTTPGssapiAuthHandler: step 2 failed with %d" % result) - return server_response - except GssapiAuthError as exc: - logging.error(repr(exc)) - finally: - self.clean_context() - self._reset() - -if __name__ == '__main__': - import sys - # debug - import httplib - httplib.HTTPConnection.debuglevel = 1 - httplib.HTTPSConnection.debuglevel = 1 - # debug - import logging - logging.basicConfig(level=logging.DEBUG) - # handle cookies - import cookielib - cj = cookielib.CookieJar() - ch = urllib2.HTTPCookieProcessor(cj) - # test with url sys.argv[1] - h = HTTPGssapiAuthHandler() - response = urllib2.build_opener(h, ch).open(sys.argv[1]) - print('\nresponse: %s\n--------------\n' % response.code, response.info()) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py deleted file mode 100644 index 9cd2acda..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/vcgutils.py +++ /dev/null @@ -1,216 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Functions to generate files readable with Georg Sander's vcg -(Visualization of Compiler Graphs). 
- -You can download vcg at http://rw4.cs.uni-sb.de/~sander/html/gshome.html -Note that vcg exists as a debian package. - -See vcg's documentation for explanation about the different values that -maybe used for the functions parameters. - - - - -""" -__docformat__ = "restructuredtext en" - -import string - -ATTRS_VAL = { - 'algos': ('dfs', 'tree', 'minbackward', - 'left_to_right', 'right_to_left', - 'top_to_bottom', 'bottom_to_top', - 'maxdepth', 'maxdepthslow', 'mindepth', 'mindepthslow', - 'mindegree', 'minindegree', 'minoutdegree', - 'maxdegree', 'maxindegree', 'maxoutdegree'), - 'booleans': ('yes', 'no'), - 'colors': ('black', 'white', 'blue', 'red', 'green', 'yellow', - 'magenta', 'lightgrey', - 'cyan', 'darkgrey', 'darkblue', 'darkred', 'darkgreen', - 'darkyellow', 'darkmagenta', 'darkcyan', 'gold', - 'lightblue', 'lightred', 'lightgreen', 'lightyellow', - 'lightmagenta', 'lightcyan', 'lilac', 'turquoise', - 'aquamarine', 'khaki', 'purple', 'yellowgreen', 'pink', - 'orange', 'orchid'), - 'shapes': ('box', 'ellipse', 'rhomb', 'triangle'), - 'textmodes': ('center', 'left_justify', 'right_justify'), - 'arrowstyles': ('solid', 'line', 'none'), - 'linestyles': ('continuous', 'dashed', 'dotted', 'invisible'), - } - -# meaning of possible values: -# O -> string -# 1 -> int -# list -> value in list -GRAPH_ATTRS = { - 'title': 0, - 'label': 0, - 'color': ATTRS_VAL['colors'], - 'textcolor': ATTRS_VAL['colors'], - 'bordercolor': ATTRS_VAL['colors'], - 'width': 1, - 'height': 1, - 'borderwidth': 1, - 'textmode': ATTRS_VAL['textmodes'], - 'shape': ATTRS_VAL['shapes'], - 'shrink': 1, - 'stretch': 1, - 'orientation': ATTRS_VAL['algos'], - 'vertical_order': 1, - 'horizontal_order': 1, - 'xspace': 1, - 'yspace': 1, - 'layoutalgorithm': ATTRS_VAL['algos'], - 'late_edge_labels': ATTRS_VAL['booleans'], - 'display_edge_labels': ATTRS_VAL['booleans'], - 'dirty_edge_labels': ATTRS_VAL['booleans'], - 'finetuning': ATTRS_VAL['booleans'], - 'manhattan_edges': ATTRS_VAL['booleans'], - 'smanhattan_edges': ATTRS_VAL['booleans'], - 'port_sharing': ATTRS_VAL['booleans'], - 'edges': ATTRS_VAL['booleans'], - 'nodes': ATTRS_VAL['booleans'], - 'splines': ATTRS_VAL['booleans'], - } -NODE_ATTRS = { - 'title': 0, - 'label': 0, - 'color': ATTRS_VAL['colors'], - 'textcolor': ATTRS_VAL['colors'], - 'bordercolor': ATTRS_VAL['colors'], - 'width': 1, - 'height': 1, - 'borderwidth': 1, - 'textmode': ATTRS_VAL['textmodes'], - 'shape': ATTRS_VAL['shapes'], - 'shrink': 1, - 'stretch': 1, - 'vertical_order': 1, - 'horizontal_order': 1, - } -EDGE_ATTRS = { - 'sourcename': 0, - 'targetname': 0, - 'label': 0, - 'linestyle': ATTRS_VAL['linestyles'], - 'class': 1, - 'thickness': 0, - 'color': ATTRS_VAL['colors'], - 'textcolor': ATTRS_VAL['colors'], - 'arrowcolor': ATTRS_VAL['colors'], - 'backarrowcolor': ATTRS_VAL['colors'], - 'arrowsize': 1, - 'backarrowsize': 1, - 'arrowstyle': ATTRS_VAL['arrowstyles'], - 'backarrowstyle': ATTRS_VAL['arrowstyles'], - 'textmode': ATTRS_VAL['textmodes'], - 'priority': 1, - 'anchor': 1, - 'horizontal_order': 1, - } - - -# Misc utilities ############################################################### - -def latin_to_vcg(st): - """Convert latin characters using vcg escape sequence. - """ - for char in st: - if char not in string.ascii_letters: - try: - num = ord(char) - if num >= 192: - st = st.replace(char, r'\fi%d'%ord(char)) - except: - pass - return st - - -class VCGPrinter: - """A vcg graph writer. 
- """ - - def __init__(self, output_stream): - self._stream = output_stream - self._indent = '' - - def open_graph(self, **args): - """open a vcg graph - """ - self._stream.write('%sgraph:{\n'%self._indent) - self._inc_indent() - self._write_attributes(GRAPH_ATTRS, **args) - - def close_graph(self): - """close a vcg graph - """ - self._dec_indent() - self._stream.write('%s}\n'%self._indent) - - - def node(self, title, **args): - """draw a node - """ - self._stream.write('%snode: {title:"%s"' % (self._indent, title)) - self._write_attributes(NODE_ATTRS, **args) - self._stream.write('}\n') - - - def edge(self, from_node, to_node, edge_type='', **args): - """draw an edge from a node to another. - """ - self._stream.write( - '%s%sedge: {sourcename:"%s" targetname:"%s"' % ( - self._indent, edge_type, from_node, to_node)) - self._write_attributes(EDGE_ATTRS, **args) - self._stream.write('}\n') - - - # private ################################################################## - - def _write_attributes(self, attributes_dict, **args): - """write graph, node or edge attributes - """ - for key, value in args.items(): - try: - _type = attributes_dict[key] - except KeyError: - raise Exception('''no such attribute %s -possible attributes are %s''' % (key, attributes_dict.keys())) - - if not _type: - self._stream.write('%s%s:"%s"\n' % (self._indent, key, value)) - elif _type == 1: - self._stream.write('%s%s:%s\n' % (self._indent, key, - int(value))) - elif value in _type: - self._stream.write('%s%s:%s\n' % (self._indent, key, value)) - else: - raise Exception('''value %s isn\'t correct for attribute %s -correct values are %s''' % (value, key, _type)) - - def _inc_indent(self): - """increment indentation - """ - self._indent = ' %s' % self._indent - - def _dec_indent(self): - """decrement indentation - """ - self._indent = self._indent[:-2] diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py b/pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py deleted file mode 100644 index ed2b70f9..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/visitor.py +++ /dev/null @@ -1,109 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""A generic visitor abstract implementation. 
- - - - -""" -__docformat__ = "restructuredtext en" - -def no_filter(_): - return 1 - -# Iterators ################################################################### -class FilteredIterator(object): - - def __init__(self, node, list_func, filter_func=None): - self._next = [(node, 0)] - if filter_func is None: - filter_func = no_filter - self._list = list_func(node, filter_func) - - def __next__(self): - try: - return self._list.pop(0) - except : - return None - - next = __next__ - -# Base Visitor ################################################################ -class Visitor(object): - - def __init__(self, iterator_class, filter_func=None): - self._iter_class = iterator_class - self.filter = filter_func - - def visit(self, node, *args, **kargs): - """ - launch the visit on a given node - - call 'open_visit' before the beginning of the visit, with extra args - given - when all nodes have been visited, call the 'close_visit' method - """ - self.open_visit(node, *args, **kargs) - return self.close_visit(self._visit(node)) - - def _visit(self, node): - iterator = self._get_iterator(node) - n = next(iterator) - while n: - result = n.accept(self) - n = next(iterator) - return result - - def _get_iterator(self, node): - return self._iter_class(node, self.filter) - - def open_visit(self, *args, **kargs): - """ - method called at the beginning of the visit - """ - pass - - def close_visit(self, result): - """ - method called at the end of the visit - """ - return result - -# standard visited mixin ###################################################### -class VisitedMixIn(object): - """ - Visited interface allow node visitors to use the node - """ - def get_visit_name(self): - """ - return the visit name for the mixed class. When calling 'accept', the - method <'visit_' + name returned by this method> will be called on the - visitor - """ - try: - return self.TYPE.replace('-', '_') - except: - return self.__class__.__name__.lower() - - def accept(self, visitor, *args, **kwargs): - func = getattr(visitor, 'visit_%s' % self.get_visit_name()) - return func(self, *args, **kwargs) - - def leave(self, visitor, *args, **kwargs): - func = getattr(visitor, 'leave_%s' % self.get_visit_name()) - return func(self, *args, **kwargs) diff --git a/pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py b/pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py deleted file mode 100644 index d383b9d5..00000000 --- a/pymode/libs/logilab-common-1.4.1/logilab/common/xmlutils.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""XML utilities. - -This module contains useful functions for parsing and using XML data. 
For the -moment, there is only one function that can parse the data inside a processing -instruction and return a Python dictionary. - - - - -""" -__docformat__ = "restructuredtext en" - -import re - -RE_DOUBLE_QUOTE = re.compile('([\w\-\.]+)="([^"]+)"') -RE_SIMPLE_QUOTE = re.compile("([\w\-\.]+)='([^']+)'") - -def parse_pi_data(pi_data): - """ - Utility function that parses the data contained in an XML - processing instruction and returns a dictionary of keywords and their - associated values (most of the time, the processing instructions contain - data like ``keyword="value"``, if a keyword is not associated to a value, - for example ``keyword``, it will be associated to ``None``). - - :param pi_data: data contained in an XML processing instruction. - :type pi_data: unicode - - :returns: Dictionary of the keywords (Unicode strings) associated to - their values (Unicode strings) as they were defined in the - data. - :rtype: dict - """ - results = {} - for elt in pi_data.split(): - if RE_DOUBLE_QUOTE.match(elt): - kwd, val = RE_DOUBLE_QUOTE.match(elt).groups() - elif RE_SIMPLE_QUOTE.match(elt): - kwd, val = RE_SIMPLE_QUOTE.match(elt).groups() - else: - kwd, val = elt, None - results[kwd] = val - return results diff --git a/pymode/libs/logilab-common-1.4.1/setup.cfg b/pymode/libs/logilab-common-1.4.1/setup.cfg deleted file mode 100644 index 8b48b197..00000000 --- a/pymode/libs/logilab-common-1.4.1/setup.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[bdist_rpm] -packager = Sylvain Thenault -provides = logilab.common - -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - diff --git a/pymode/libs/logilab-common-1.4.1/setup.py b/pymode/libs/logilab-common-1.4.1/setup.py deleted file mode 100644 index c565ee15..00000000 --- a/pymode/libs/logilab-common-1.4.1/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -# pylint: disable=W0404,W0622,W0704,W0613,W0152 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Generic Setup script, takes package info from __pkginfo__.py file. 
-""" -__docformat__ = "restructuredtext en" - -from setuptools import setup, find_packages -from io import open -from os import path - -here = path.abspath(path.dirname(__file__)) - -pkginfo = {} -with open(path.join(here, '__pkginfo__.py')) as f: - exec(f.read(), pkginfo) - -# Get the long description from the relevant file -with open(path.join(here, 'README'), encoding='utf-8') as f: - long_description = f.read() - -setup( - name=pkginfo['distname'], - version=pkginfo['version'], - description=pkginfo['description'], - long_description=long_description, - url=pkginfo['web'], - author=pkginfo['author'], - author_email=pkginfo['author_email'], - license=pkginfo['license'], - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=pkginfo['classifiers'], - packages=find_packages(exclude=['contrib', 'docs', 'test*']), - namespace_packages=[pkginfo['subpackage_of']], - install_requires=pkginfo['install_requires'], - tests_require=pkginfo['tests_require'], - scripts=pkginfo['scripts'], -) diff --git a/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog b/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog deleted file mode 100644 index 22a45529..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/ChangeLog +++ /dev/null @@ -1,184 +0,0 @@ -ChangeLog for logilab.devtools -============================== - - -- - * added the missing dos2unix script to the distribution - - * major debianize refactoring using class / inheritance instead of - functions composition - - * import the version control library from oobrother extended with code - from devtools / apycot - - * Singing in the rain: - - - I'm - - singing in the rain - - * Big change multiline - tata titi toto - - - small change - - other change - - multiline change - really ? - - Eat your vegetable and brush after every meals - - - -2004-02-13 -- 0.4.5 - * fix debianize to handle dependencies to python standalone package - (ie no "python" prefix in the default package) - - * fixed cvslog in rlog mode - - - -2004-02-11 -- 0.4.4 - * check web and ftp variables from __pkginfo__ - - * check for long and short descriptions in __pkginfo__ - - * outdated copyright is now a warning - - * consider distuils automaticaly install .c files - - * fix check_package exit status - - * merged sgml, elisp and data packages in generated debian files - - - -2003-12-05 -- 0.4.3 - * fix bug in buildeb making it usable from buildpackage... - - - -2003-11-24 -- 0.4.2 - * fixed pb with check_info_module and catalog, when not launched from the - package directory - - * ignore build directory in check_manifest - - * fix to avoid pb with "non executed" docstring in pycoverage - - * add support for --help and fix exit status to pycoverage - - - -2003-11-20 -- 0.4.1 - * added code coverage tool, starting from - http://www.garethrees.org/2001/12/04/python-coverage/ - - * added --help option to buildeb - - - -2003-11-14 -- 0.4.0 - * added a python script buildeb to build debian package (buildpackage call - this script now) - - * debianize now puts tests in a separated package (-test) and generate - package for zope >= 2.6.2 (i.e. 
python 2.2) - - * fix detection of examples directory in pkginfo - - * fix debhelper dependency in build-depends - - * remove minor bug in buildpackage (try to move archive.gz instead of - archive.tar.gz - - * bug fix in debianize zope handler - - - -2003-10-06 -- 0.3.4 - * remove important bug in buildpackage (rm sourcetree when building a - source distrib) - - * add version to dependency between main packages and sub-packages (-data, - -elisp and -sgml) - - * change way of creating the .orig.tar.gz - - * create source distribution when building debian package - - * fix path in log message for MANIFEST.in, __pkginfo__ and bin directory - - * make changelog more robust - - * debianize bug fixes - - - -2003-09-22 -- 0.3.3 - * fix python.postinst script to avoid compiling of others packages :) - - - -2003-09-19 -- 0.3.2 - * add basic support for XSLT distribution - - * fix DTD and catalog handling in debianize - - * fix bug in check_pkginfo - - * updated documentation - - - -2003-09-18 -- 0.3.1 - * add support for data files in debianize - - * test python version in debianize - - * minor fixes - - * updated setup.py template - - - -2003-09-18 -- 0.3.0 - * updates for a new packaging standard - - * removed jabbercli, cvs_filecheck - - * added preparedistrib, tagpackage, pkginfo - - * simpler debianize relying on a generic setup.py - - * fix some debian templates - - * checkpackage rewrite - - * provides checkers for the tester package - - - -2003-08-29 -- 0.2.4 - * added cvs_filecheck - - - -2003-06-20 -- 0.2.2 - * buildpackages fixes - - - -2003-06-17 -- 0.2.1 - * fix setup.py - - * make pkghandlers.export working with python <= 2.1 - - * add the mailinglist variable in __pkginfo__, used for announce - generation in makedistrib - - - -2003-06-16 -- 0.2.0 - * minor enhancements - - * get package information for __pkginfo__.py - - - diff --git a/pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip b/pymode/libs/logilab-common-1.4.1/test/data/MyPyPa-0.1.0.zip deleted file mode 100644 index a7b3125f999ac1f8e8bd5b5260d3de1eeb840fec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 206 zcmWIWW@h1H00CCVN~>2f#aSXiHV6wb$S~wq7E~4_>c_`t=4F<|$LkeThK6u5Fh|`u z=?TK672FJrEZ>Ko&DF0`ZbY d5DS}qtPuOq>. 
-"""logilab.common packaging information""" -__docformat__ = "restructuredtext en" -import sys -import os - -distname = 'logilab-common' -modname = 'common' -subpackage_of = 'logilab' -subpackage_master = True - -numversion = (0, 63, 2) -version = '.'.join([str(num) for num in numversion]) - -license = 'LGPL' # 2.1 or later -description = "collection of low-level Python packages and modules used by Logilab projects" -web = "http://www.logilab.org/project/%s" % distname -mailinglist = "mailto://python-projects@lists.logilab.org" -author = "Logilab" -author_email = "contact@logilab.fr" - - -from os.path import join -scripts = [join('bin', 'logilab-pytest')] -include_dirs = [join('test', 'data')] - -install_requires = [ - 'six >= 1.4.0', - ] -tests_require = ['pytz'] - -if sys.version_info < (2, 7): - install_requires.append('unittest2 >= 0.5.1') -if os.name == 'nt': - install_requires.append('colorama') - -classifiers = ["Topic :: Utilities", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - ] diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/NOTHING deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README deleted file mode 100644 index 27ab0b99..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/README +++ /dev/null @@ -1 +0,0 @@ -thank you diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin deleted file mode 100644 index 0e46b314..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/coin +++ /dev/null @@ -1 +0,0 @@ -baba diff --git a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt deleted file mode 100644 index 785a58b9..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/content_differ_dir/subdir/toto.txt +++ /dev/null @@ -1,53 +0,0 @@ -Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui -quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut -facta, aut impediente autem totum unum directam eius tum voluptate -sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob -vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non, -atomis videatur in ut, mihi litteris si ante vivere, deinde -emancipaverat appetendum sine erant ex metu philosophiae fatemur, et -magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere -imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat, -eademque vivendum, contra similique. - -Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate -inertissimae sunt, corpora denique fabulis dicere ab et quae ad -politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus -videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram -conflixisse nec inpotenti, et bonum ad nostris servare omni, saepe -multis, consequantur id, in fructuosam multi quod, voluptatem abducat -a tantum sit error ipso si respirare corrupte referuntur, maiorem.. -Voluptatem a etiam perspici gravissimas, cuius.. 
Unum morbis ne esse -conscientia tamen conclusionemque notionem, amentur quam, praeclarorum -eum consulatu iis invitat solum porro, quidem ad patria, fore res -athenis sempiternum alii venire, est mei nam improbis dolorem, -permulta timidiores. - -Et inquam sic familias, sequatur animis quae et quae ea esse, autem -impediri quaeque modo inciderint consecutionem expectata, sed severa -etiamsi, in egregios temporibus infinito ad artibus, voluptatem -aristotele, tandem aliquo industriae collegi timiditatem sibi igitur -aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus -fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se -petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla, -et quod sed. - -Verissimum confirmat accurate totam iisque sequitur aut probabo et et -adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et -libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem, -sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque -facilius vita invitat, delectus excepturi ex libidinum non qua -consequi beate quae ratio.. Illa poetis videor requirere, quippiam et -autem ut et esset voluptate neque consilia sed voluptatibus est -virtutum minima et, interesse exquirere et peccandi quae carere se, -angere.. Firme nomine oratio perferendis si voluptates cogitavisse, -feci maledici ea vis et, nam quae legantur animum animis temeritate, -amicitiam desideraturam tollatur nisi de voluptatem. - -Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis -detractis reprehensiones nostrum sola non tantopere perfruique quoque -fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae, -labore.. Omnia si quaerimus, si praetermissum vero deserunt quia -democriti retinere ignoratione, iam de gerendarum vel a maxime -provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo -quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa -susceperant legant consiliisque, est sed quantum igitur. diff --git a/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py b/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py deleted file mode 100644 index be3b1031..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/deprecation.py +++ /dev/null @@ -1,4 +0,0 @@ -# placeholder used by unittest_deprecation - -def moving_target(): - pass diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/NOTHING deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README deleted file mode 100644 index 27ab0b99..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/README +++ /dev/null @@ -1 +0,0 @@ -thank you diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt deleted file mode 100644 index 4bf7233a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdir/toto.txt +++ /dev/null @@ -1,53 +0,0 @@ -Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui -quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut -facta, aut impediente autem totum unum directam eius tum voluptate -sensuum reperiuntur ad ab, quae ac.. 
Sed eius enim a, tranquillat ob -vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non, -atomis videatur in ut, mihi litteris si ante vivere, deinde -emancipaverat appetendum sine erant ex metu philosophiae fatemur, et -magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere -imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat, -eademque vivendum, contra similique. - -Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate -inertissimae sunt, corpora denique fabulis dicere ab et quae ad -politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus -videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram -pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo -rerum constringendos ex pondere lucilius essent neglexerit insequitur -a tantum sit error ipso si respirare corrupte referuntur, maiorem.. -Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse -conscientia tamen conclusionemque notionem, amentur quam, praeclarorum -eum consulatu iis invitat solum porro, quidem ad patria, fore res -athenis sempiternum alii venire, est mei nam improbis dolorem, -permulta timidiores. - -Et inquam sic familias, sequatur animis quae et quae ea esse, autem -impediri quaeque modo inciderint consecutionem expectata, sed severa -etiamsi, in egregios temporibus infinito ad artibus, voluptatem -aristotele, tandem aliquo industriae collegi timiditatem sibi igitur -aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus -fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se -petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla, -et quod sed. - -Verissimum confirmat accurate totam iisque sequitur aut probabo et et -adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et -libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem, -sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque -facilius vita invitat, delectus excepturi ex libidinum non qua -consequi beate quae ratio.. Illa poetis videor requirere, quippiam et -autem ut et esset voluptate neque consilia sed voluptatibus est -virtutum minima et, interesse exquirere et peccandi quae carere se, -angere.. Firme nomine oratio perferendis si voluptates cogitavisse, -feci maledici ea vis et, nam quae legantur animum animis temeritate, -amicitiam desideraturam tollatur nisi de voluptatem. - -Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis -detractis reprehensiones nostrum sola non tantopere perfruique quoque -fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae, -labore.. Omnia si quaerimus, si praetermissum vero deserunt quia -democriti retinere ignoratione, iam de gerendarum vel a maxime -provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo -quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa -susceperant legant consiliisque, est sed quantum igitur. 
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello b/pymode/libs/logilab-common-1.4.1/test/data/file_differ_dir/subdirtwo/Hello deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/__init__.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/foo.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/foo.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/module.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/module2.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/newlines.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/noendingnewline.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/nonregr.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/normal_file.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/spam.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/doc.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py b/pymode/libs/logilab-common-1.4.1/test/data/find_test/sub/momo.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test.ini deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test1.msg deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg b/pymode/libs/logilab-common-1.4.1/test/data/find_test/test2.msg deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/find_test/write_protected_file.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/foo.txt b/pymode/libs/logilab-common-1.4.1/test/data/foo.txt deleted file mode 100644 index a08c29e4..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/foo.txt +++ /dev/null @@ -1,9 +0,0 @@ -a -b -c -d -e -f -g -h - diff --git a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py 
b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py deleted file mode 100644 index 74b26b82..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# force a "direct" python import -from . import foo diff --git a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py b/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py deleted file mode 100644 index 8f7de1e8..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/lmfp/foo.py +++ /dev/null @@ -1,6 +0,0 @@ -import sys -if not getattr(sys, 'bar', None): - sys.just_once = [] -# there used to be two numbers here because -# of a load_module_from_path bug -sys.just_once.append(42) diff --git a/pymode/libs/logilab-common-1.4.1/test/data/module.py b/pymode/libs/logilab-common-1.4.1/test/data/module.py deleted file mode 100644 index 493e6762..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/module.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: Latin-1 -*- -"""test module for astng -""" -from __future__ import print_function - -from logilab.common import modutils, Execute as spawn -from logilab.common.astutils import * -import os.path - -MY_DICT = {} - - -def global_access(key, val): - """function test""" - local = 1 - MY_DICT[key] = val - for i in val: - if i: - del MY_DICT[i] - continue - else: - break - else: - print('!!!') - -class YO: - """hehe""" - a=1 - def __init__(self): - try: - self.yo = 1 - except ValueError as ex: - pass - except (NameError, TypeError): - raise XXXError() - except: - raise - -#print('*****>',YO.__dict__) -class YOUPI(YO): - class_attr = None - - def __init__(self): - self.member = None - - def method(self): - """method test""" - global MY_DICT - try: - MY_DICT = {} - local = None - autre = [a for a, b in MY_DICT if b] - if b in autre: - print('yo', end=' ') - elif a in autre: - print('hehe') - global_access(local, val=autre) - finally: - return local - - def static_method(): - """static method test""" - assert MY_DICT, '???' 
- static_method = staticmethod(static_method) - - def class_method(cls): - """class method test""" - exec(a, b) - class_method = classmethod(class_method) diff --git a/pymode/libs/logilab-common-1.4.1/test/data/module2.py b/pymode/libs/logilab-common-1.4.1/test/data/module2.py deleted file mode 100644 index 51509f3b..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/module2.py +++ /dev/null @@ -1,77 +0,0 @@ -from data.module import YO, YOUPI -import data - -class Specialization(YOUPI, YO): pass - -class Metaclass(type): pass - -class Interface: pass - -class MyIFace(Interface): pass - -class AnotherIFace(Interface): pass - -class MyException(Exception): pass -class MyError(MyException): pass - -class AbstractClass(object): - - def to_override(self, whatever): - raise NotImplementedError() - - def return_something(self, param): - if param: - return 'toto' - return - -class Concrete0: - __implements__ = MyIFace -class Concrete1: - __implements__ = MyIFace, AnotherIFace -class Concrete2: - __implements__ = (MyIFace, - AnotherIFace) -class Concrete23(Concrete1): pass - -del YO.member - -del YO -[SYN1, SYN2] = Concrete0, Concrete1 -assert '1' -b = 1 | 2 & 3 ^ 8 -exec('c = 3') -exec('c = 3', {}, {}) - -def raise_string(a=2, *args, **kwargs): - raise 'pas glop' - raise Exception('yo') - yield 'coucou' - -a = b + 2 -c = b * 2 -c = b / 2 -c = b // 2 -c = b - 2 -c = b % 2 -c = b ** 2 -c = b << 2 -c = b >> 2 -c = ~b - -c = not b - -d = [c] -e = d[:] -e = d[a:b:c] - -raise_string(*args, **kwargs) - -print >> stream, 'bonjour' -print >> stream, 'salut', - - -def make_class(any, base=data.module.YO, *args, **kwargs): - """check base is correctly resolved to Concrete0""" - class Aaaa(base): - """dynamic class""" - return Aaaa diff --git a/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt b/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt deleted file mode 100644 index e1f25c09..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/newlines.txt +++ /dev/null @@ -1,3 +0,0 @@ -# mixed new lines -1 -2 3 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py b/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py deleted file mode 100644 index 110f902d..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/noendingnewline.py +++ /dev/null @@ -1,36 +0,0 @@ -from __future__ import print_function - -import unittest - - -class TestCase(unittest.TestCase): - - def setUp(self): - unittest.TestCase.setUp(self) - - - def tearDown(self): - unittest.TestCase.tearDown(self) - - def testIt(self): - self.a = 10 - self.xxx() - - - def xxx(self): - if False: - pass - print('a') - - if False: - pass - pass - - if False: - pass - print('rara') - - -if __name__ == '__main__': - print('test2') - unittest.main() diff --git a/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py b/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py deleted file mode 100644 index a4b5ef7d..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/nonregr.py +++ /dev/null @@ -1,16 +0,0 @@ -from __future__ import print_function - -try: - enumerate = enumerate -except NameError: - - def enumerate(iterable): - """emulates the python2.3 enumerate() function""" - i = 0 - for val in iterable: - yield i, val - i += 1 - -def toto(value): - for k, v in value: - print(v.get('yo')) diff --git a/pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/normal_file.txt deleted file mode 100644 index e69de29b..00000000 diff --git 
a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/NOTHING deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README deleted file mode 100644 index 27ab0b99..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/README +++ /dev/null @@ -1 +0,0 @@ -thank you diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin deleted file mode 100644 index 0e46b314..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/coin +++ /dev/null @@ -1 +0,0 @@ -baba diff --git a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt deleted file mode 100644 index 4bf7233a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/reference_dir/subdir/toto.txt +++ /dev/null @@ -1,53 +0,0 @@ -Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui -quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut -facta, aut impediente autem totum unum directam eius tum voluptate -sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob -vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non, -atomis videatur in ut, mihi litteris si ante vivere, deinde -emancipaverat appetendum sine erant ex metu philosophiae fatemur, et -magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere -imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat, -eademque vivendum, contra similique. - -Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate -inertissimae sunt, corpora denique fabulis dicere ab et quae ad -politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus -videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram -pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo -rerum constringendos ex pondere lucilius essent neglexerit insequitur -a tantum sit error ipso si respirare corrupte referuntur, maiorem.. -Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse -conscientia tamen conclusionemque notionem, amentur quam, praeclarorum -eum consulatu iis invitat solum porro, quidem ad patria, fore res -athenis sempiternum alii venire, est mei nam improbis dolorem, -permulta timidiores. - -Et inquam sic familias, sequatur animis quae et quae ea esse, autem -impediri quaeque modo inciderint consecutionem expectata, sed severa -etiamsi, in egregios temporibus infinito ad artibus, voluptatem -aristotele, tandem aliquo industriae collegi timiditatem sibi igitur -aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus -fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se -petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla, -et quod sed. - -Verissimum confirmat accurate totam iisque sequitur aut probabo et et -adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et -libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem, -sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque -facilius vita invitat, delectus excepturi ex libidinum non qua -consequi beate quae ratio.. 
Illa poetis videor requirere, quippiam et -autem ut et esset voluptate neque consilia sed voluptatibus est -virtutum minima et, interesse exquirere et peccandi quae carere se, -angere.. Firme nomine oratio perferendis si voluptates cogitavisse, -feci maledici ea vis et, nam quae legantur animum animis temeritate, -amicitiam desideraturam tollatur nisi de voluptatem. - -Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis -detractis reprehensiones nostrum sola non tantopere perfruique quoque -fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae, -labore.. Omnia si quaerimus, si praetermissum vero deserunt quia -democriti retinere ignoratione, iam de gerendarum vel a maxime -provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo -quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa -susceperant legant consiliisque, est sed quantum igitur. diff --git a/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py b/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py deleted file mode 100644 index 6cea558b..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/regobjects.py +++ /dev/null @@ -1,22 +0,0 @@ -"""unittest_registry data file""" -from logilab.common.registry import yes, RegistrableObject, RegistrableInstance - -class Proxy(object): - """annoying object should that not be registered, nor cause error""" - def __getattr__(self, attr): - return 1 - -trap = Proxy() - -class AppObjectClass(RegistrableObject): - __registry__ = 'zereg' - __regid__ = 'appobject1' - __select__ = yes() - -class AppObjectInstance(RegistrableInstance): - __registry__ = 'zereg' - __select__ = yes() - def __init__(self, regid): - self.__regid__ = regid - -appobject2 = AppObjectInstance('appobject2') diff --git a/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py b/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py deleted file mode 100644 index 091b9f7d..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/regobjects2.py +++ /dev/null @@ -1,8 +0,0 @@ -from logilab.common.registry import RegistrableObject, RegistrableInstance, yes - -class MyRegistrableInstance(RegistrableInstance): - __regid__ = 'appobject3' - __select__ = yes() - __registry__ = 'zereg' - -instance = MyRegistrableInstance(__module__=__name__) diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/NOTHING deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README deleted file mode 100644 index 27ab0b99..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/README +++ /dev/null @@ -1 +0,0 @@ -thank you diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin deleted file mode 100644 index 0e46b314..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/coin +++ /dev/null @@ -1 +0,0 @@ -baba diff --git a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt deleted file mode 100644 index 4bf7233a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/same_dir/subdir/toto.txt +++ /dev/null @@ -1,53 +0,0 @@ -Lorem ipsum dolor sit amet, consectetuer adipisci elit. 
Necesse qui -quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut -facta, aut impediente autem totum unum directam eius tum voluptate -sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob -vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non, -atomis videatur in ut, mihi litteris si ante vivere, deinde -emancipaverat appetendum sine erant ex metu philosophiae fatemur, et -magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere -imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat, -eademque vivendum, contra similique. - -Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate -inertissimae sunt, corpora denique fabulis dicere ab et quae ad -politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus -videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram -pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo -rerum constringendos ex pondere lucilius essent neglexerit insequitur -a tantum sit error ipso si respirare corrupte referuntur, maiorem.. -Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse -conscientia tamen conclusionemque notionem, amentur quam, praeclarorum -eum consulatu iis invitat solum porro, quidem ad patria, fore res -athenis sempiternum alii venire, est mei nam improbis dolorem, -permulta timidiores. - -Et inquam sic familias, sequatur animis quae et quae ea esse, autem -impediri quaeque modo inciderint consecutionem expectata, sed severa -etiamsi, in egregios temporibus infinito ad artibus, voluptatem -aristotele, tandem aliquo industriae collegi timiditatem sibi igitur -aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus -fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se -petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla, -et quod sed. - -Verissimum confirmat accurate totam iisque sequitur aut probabo et et -adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et -libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem, -sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque -facilius vita invitat, delectus excepturi ex libidinum non qua -consequi beate quae ratio.. Illa poetis videor requirere, quippiam et -autem ut et esset voluptate neque consilia sed voluptatibus est -virtutum minima et, interesse exquirere et peccandi quae carere se, -angere.. Firme nomine oratio perferendis si voluptates cogitavisse, -feci maledici ea vis et, nam quae legantur animum animis temeritate, -amicitiam desideraturam tollatur nisi de voluptatem. - -Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis -detractis reprehensiones nostrum sola non tantopere perfruique quoque -fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae, -labore.. Omnia si quaerimus, si praetermissum vero deserunt quia -democriti retinere ignoratione, iam de gerendarum vel a maxime -provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo -quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa -susceperant legant consiliisque, est sed quantum igitur. 
diff --git a/pymode/libs/logilab-common-1.4.1/test/data/spam.txt b/pymode/libs/logilab-common-1.4.1/test/data/spam.txt deleted file mode 100644 index 068911b1..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/spam.txt +++ /dev/null @@ -1,9 +0,0 @@ -a -b -c -h -e -f -g -h - diff --git a/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt b/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt deleted file mode 100644 index c60eb160..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/sub/doc.txt +++ /dev/null @@ -1 +0,0 @@ -héhéhé diff --git a/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py b/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py deleted file mode 100644 index 746b5d04..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/sub/momo.py +++ /dev/null @@ -1,3 +0,0 @@ -from __future__ import print_function - -print('yo') diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/NOTHING deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README deleted file mode 100644 index 27ab0b99..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/README +++ /dev/null @@ -1 +0,0 @@ -thank you diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin deleted file mode 100644 index 0e46b314..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/coin +++ /dev/null @@ -1 +0,0 @@ -baba diff --git a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt b/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt deleted file mode 100644 index 4bf7233a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/subdir_differ_dir/subdir/toto.txt +++ /dev/null @@ -1,53 +0,0 @@ -Lorem ipsum dolor sit amet, consectetuer adipisci elit. Necesse qui -quidem constituam tantis, et possunt placeat ipsum ex aut iucunde aut -facta, aut impediente autem totum unum directam eius tum voluptate -sensuum reperiuntur ad ab, quae ac.. Sed eius enim a, tranquillat ob -vexetur permagna potius voluptate eo aliae, vivamus esse solis ut non, -atomis videatur in ut, mihi litteris si ante vivere, deinde -emancipaverat appetendum sine erant ex metu philosophiae fatemur, et -magis non corpora ne, maluisti ita locupletiorem medicorum.. Tradere -imperitos exiguam in sint saluti temeritate hoc, nullam nec quaerat, -eademque vivendum, contra similique. - -Molestiae qui, tam sic ea honesto, graeca consecutionem voluptate -inertissimae sunt, corpora denique fabulis dicere ab et quae ad -politus tum in nostris.. Plane pueriliter, hoc affectus quid iis plus -videtur dolorem vivere ad esse asperiores.. Quorum si nihilo eram -pedalis pertinax ii minus, referta mediocrem iustitiam acutum quo -rerum constringendos ex pondere lucilius essent neglexerit insequitur -a tantum sit error ipso si respirare corrupte referuntur, maiorem.. -Voluptatem a etiam perspici gravissimas, cuius.. Unum morbis ne esse -conscientia tamen conclusionemque notionem, amentur quam, praeclarorum -eum consulatu iis invitat solum porro, quidem ad patria, fore res -athenis sempiternum alii venire, est mei nam improbis dolorem, -permulta timidiores. 
- -Et inquam sic familias, sequatur animis quae et quae ea esse, autem -impediri quaeque modo inciderint consecutionem expectata, sed severa -etiamsi, in egregios temporibus infinito ad artibus, voluptatem -aristotele, tandem aliquo industriae collegi timiditatem sibi igitur -aut, se cum tranquillitate loquuntur quod nullo, quam suum illustribus -fugiendam illis tam consequatur.. Quas maximisque impendere ipsum se -petat altera enim ocurreret sibi maxime, possit ea aegritudo aut ulla, -et quod sed. - -Verissimum confirmat accurate totam iisque sequitur aut probabo et et -adhibenda, mihi sed ad et quod erga minima rerum eius quod, tale et -libidinosarum liber, omnis quae et nunc sicine, nec at aut omnem, -sententiae a, repudiandae.. Vero esse crudelis amentur ut, atque -facilius vita invitat, delectus excepturi ex libidinum non qua -consequi beate quae ratio.. Illa poetis videor requirere, quippiam et -autem ut et esset voluptate neque consilia sed voluptatibus est -virtutum minima et, interesse exquirere et peccandi quae carere se, -angere.. Firme nomine oratio perferendis si voluptates cogitavisse, -feci maledici ea vis et, nam quae legantur animum animis temeritate, -amicitiam desideraturam tollatur nisi de voluptatem. - -Ii videri accedit de.. Graeci tum factis ea ea itaque sunt latinis -detractis reprehensiones nostrum sola non tantopere perfruique quoque -fruenda aptissimum nostrum, pueros graeca qui eruditionem est quae, -labore.. Omnia si quaerimus, si praetermissum vero deserunt quia -democriti retinere ignoratione, iam de gerendarum vel a maxime -provident, in eadem si praeterierunt, certa cibo ut utilitatibus nullo -quod voluptatis iis eamque omnia, stare aut, quamquam et, ut illa -susceperant legant consiliisque, est sed quantum igitur. diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test.ini b/pymode/libs/logilab-common-1.4.1/test/data/test.ini deleted file mode 100644 index 3785702c..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/test.ini +++ /dev/null @@ -1,20 +0,0 @@ -# test configuration -[TEST] - -dothis=yes - -value=' ' - -# you can also document the option -multiple=yop - -number=2 - -#choice -renamed=yo - -multiple-choice=yo,ye - - -[OLD] -named=key:val diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test1.msg b/pymode/libs/logilab-common-1.4.1/test/data/test1.msg deleted file mode 100644 index 33b75c83..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/data/test1.msg +++ /dev/null @@ -1,30 +0,0 @@ -From Nicolas.Chauvat@logilab.fr Wed Jul 20 12:03:06 2005 -Return-Path: -X-Original-To: nico@logilab.fr -Delivered-To: nico@logilab.fr -Received: from logilab.fr (crater.logilab.fr [172.17.1.4]) - by orion.logilab.fr (Postfix) with SMTP id 7D3412BDA6 - for ; Wed, 20 Jul 2005 12:03:06 +0200 (CEST) -Received: (nullmailer pid 8382 invoked by uid 1000); - Wed, 20 Jul 2005 10:03:20 -0000 -Date: Wed, 20 Jul 2005 12:03:20 +0200 -From: Nicolas Chauvat -To: Nicolas Chauvat -Subject: autre message -Message-ID: <20050720100320.GA8371@logilab.fr> -Mime-Version: 1.0 -Content-Type: text/plain; charset=utf-8 -Content-Disposition: inline -Content-Transfer-Encoding: 8bit -User-Agent: Mutt/1.5.9i -X-Spambayes-Classification: ham; 0.01 -Content-Length: 106 -Lines: 6 - -bonjour - --- -Nicolas Chauvat - -logilab.fr - services en informatique avancée et gestion de connaissances - diff --git a/pymode/libs/logilab-common-1.4.1/test/data/test2.msg b/pymode/libs/logilab-common-1.4.1/test/data/test2.msg deleted file mode 100644 index 3a5ca812..00000000 --- 
a/pymode/libs/logilab-common-1.4.1/test/data/test2.msg +++ /dev/null @@ -1,42 +0,0 @@ -From alexandre.fayolle@logilab.fr Wed Jul 27 11:21:57 2005 -Date: Wed, 27 Jul 2005 11:21:57 +0200 -From: Alexandre =?iso-8859-1?Q?'d=E9couvreur?= de bugs' Fayolle -To: =?iso-8859-1?B?6WzpbWVudCDg?= accents -Subject: =?iso-8859-1?Q?=C0?= LA MER -Message-ID: <20050727092157.GB3923@logilab.fr> -Mime-Version: 1.0 -Content-Type: multipart/signed; micalg=pgp-sha1; - protocol="application/pgp-signature"; boundary="wULyF7TL5taEdwHz" -Content-Disposition: inline -User-Agent: Mutt/1.5.9i -Status: RO -Content-Length: 692 -Lines: 26 - - ---wULyF7TL5taEdwHz -Content-Type: text/plain; charset=iso-8859-1 -Content-Disposition: inline -Content-Transfer-Encoding: quoted-printable - -il s'est pass=E9 de dr=F4les de choses.=20 - ---=20 -Alexandre Fayolle LOGILAB, Paris (France). -http://www.logilab.com http://www.logilab.fr http://www.logilab.org - ---wULyF7TL5taEdwHz -Content-Type: application/pgp-signature; name="signature.asc" -Content-Description: Digital signature -Content-Disposition: inline - ------BEGIN PGP SIGNATURE----- -Version: GnuPG v1.4.1 (GNU/Linux) - -iD8DBQFC51I1Ll/b4N9npV4RAsaLAJ4k9C8Hnrjg+Q3ocrUYnYppTVcgyQCeO8yT -B7AM5XzlRD1lYqlxq+h80K8= -=zfVV ------END PGP SIGNATURE----- - ---wULyF7TL5taEdwHz-- - diff --git a/pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt b/pymode/libs/logilab-common-1.4.1/test/data/write_protected_file.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py b/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py deleted file mode 100644 index 459f1720..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_cache.py +++ /dev/null @@ -1,129 +0,0 @@ -# unit tests for the cache module -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
- -from logilab.common.testlib import TestCase, unittest_main, TestSuite -from logilab.common.cache import Cache - -class CacheTestCase(TestCase): - - def setUp(self): - self.cache = Cache(5) - self.testdict = {} - - def test_setitem1(self): - """Checks that the setitem method works""" - self.cache[1] = 'foo' - self.assertEqual(self.cache[1], 'foo', "1:foo is not in cache") - self.assertEqual(len(self.cache._usage), 1) - self.assertEqual(self.cache._usage[-1], 1, - '1 is not the most recently used key') - self.assertCountEqual(self.cache._usage, - self.cache.keys(), - "usage list and data keys are different") - - def test_setitem2(self): - """Checks that the setitem method works for multiple items""" - self.cache[1] = 'foo' - self.cache[2] = 'bar' - self.assertEqual(self.cache[2], 'bar', - "2 : 'bar' is not in cache.data") - self.assertEqual(len(self.cache._usage), 2, - "lenght of usage list is not 2") - self.assertEqual(self.cache._usage[-1], 2, - '1 is not the most recently used key') - self.assertCountEqual(self.cache._usage, - self.cache.keys())# usage list and data keys are different - - def test_setitem3(self): - """Checks that the setitem method works when replacing an element in the cache""" - self.cache[1] = 'foo' - self.cache[1] = 'bar' - self.assertEqual(self.cache[1], 'bar', "1 : 'bar' is not in cache.data") - self.assertEqual(len(self.cache._usage), 1, "lenght of usage list is not 1") - self.assertEqual(self.cache._usage[-1], 1, '1 is not the most recently used key') - self.assertCountEqual(self.cache._usage, - self.cache.keys())# usage list and data keys are different - - def test_recycling1(self): - """Checks the removal of old elements""" - self.cache[1] = 'foo' - self.cache[2] = 'bar' - self.cache[3] = 'baz' - self.cache[4] = 'foz' - self.cache[5] = 'fuz' - self.cache[6] = 'spam' - self.assertTrue(1 not in self.cache, - 'key 1 has not been suppressed from the cache dictionnary') - self.assertTrue(1 not in self.cache._usage, - 'key 1 has not been suppressed from the cache LRU list') - self.assertEqual(len(self.cache._usage), 5, "lenght of usage list is not 5") - self.assertEqual(self.cache._usage[-1], 6, '6 is not the most recently used key') - self.assertCountEqual(self.cache._usage, - self.cache.keys())# usage list and data keys are different - - def test_recycling2(self): - """Checks that accessed elements get in the front of the list""" - self.cache[1] = 'foo' - self.cache[2] = 'bar' - self.cache[3] = 'baz' - self.cache[4] = 'foz' - a = self.cache[1] - self.assertEqual(a, 'foo') - self.assertEqual(self.cache._usage[-1], 1, '1 is not the most recently used key') - self.assertCountEqual(self.cache._usage, - self.cache.keys())# usage list and data keys are different - - def test_delitem(self): - """Checks that elements are removed from both element dict and element - list. 
- """ - self.cache['foo'] = 'bar' - del self.cache['foo'] - self.assertTrue('foo' not in self.cache.keys(), "Element 'foo' was not removed cache dictionnary") - self.assertTrue('foo' not in self.cache._usage, "Element 'foo' was not removed usage list") - self.assertCountEqual(self.cache._usage, - self.cache.keys())# usage list and data keys are different - - - def test_nullsize(self): - """Checks that a 'NULL' size cache doesn't store anything - """ - null_cache = Cache(0) - null_cache['foo'] = 'bar' - self.assertEqual(null_cache.size, 0, 'Cache size should be O, not %d' % \ - null_cache.size) - self.assertEqual(len(null_cache), 0, 'Cache should be empty !') - # Assert null_cache['foo'] raises a KeyError - self.assertRaises(KeyError, null_cache.__getitem__, 'foo') - # Deleting element raises a KeyError - self.assertRaises(KeyError, null_cache.__delitem__, 'foo') - - def test_getitem(self): - """ Checks that getitem doest not modify the _usage attribute - """ - try: - self.cache['toto'] - except KeyError: - self.assertTrue('toto' not in self.cache._usage) - else: - self.fail('excepted KeyError') - - -if __name__ == "__main__": - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py b/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py deleted file mode 100644 index c2572d70..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_changelog.py +++ /dev/null @@ -1,40 +0,0 @@ -# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with logilab-common. If not, see . - -from os.path import join, dirname - -from io import StringIO -from logilab.common.testlib import TestCase, unittest_main - -from logilab.common.changelog import ChangeLog - - -class ChangeLogTC(TestCase): - cl_class = ChangeLog - cl_file = join(dirname(__file__), 'data', 'ChangeLog') - - def test_round_trip(self): - cl = self.cl_class(self.cl_file) - out = StringIO() - cl.write(out) - with open(self.cl_file) as stream: - self.assertMultiLineEqual(stream.read(), out.getvalue()) - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py b/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py deleted file mode 100644 index ea7cdca6..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_configuration.py +++ /dev/null @@ -1,509 +0,0 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -import tempfile -import os -from os.path import join, dirname, abspath -import re - -from sys import version_info - -from six import integer_types - -from logilab.common import attrdict -from logilab.common.compat import StringIO -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.optik_ext import OptionValueError -from logilab.common.configuration import Configuration, OptionError, \ - OptionsManagerMixIn, OptionsProviderMixIn, Method, read_old_config, \ - merge_options - -DATA = join(dirname(abspath(__file__)), 'data') - -OPTIONS = [('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}), - ('value', {'type': 'string', 'metavar': '', 'short': 'v'}), - ('multiple', {'type': 'csv', 'default': ['yop', 'yep'], - 'metavar': '', - 'help': 'you can also document the option'}), - ('number', {'type': 'int', 'default':2, 'metavar':'', 'help': 'boom'}), - ('bytes', {'type': 'bytes', 'default':'1KB', 'metavar':''}), - ('choice', {'type': 'choice', 'default':'yo', 'choices': ('yo', 'ye'), - 'metavar':''}), - ('multiple-choice', {'type': 'multiple_choice', 'default':['yo', 'ye'], - 'choices': ('yo', 'ye', 'yu', 'yi', 'ya'), - 'metavar':''}), - ('named', {'type':'named', 'default':Method('get_named'), - 'metavar': ''}), - - ('diffgroup', {'type':'string', 'default':'pouet', 'metavar': '', - 'group': 'agroup'}), - ('reset-value', {'type': 'string', 'metavar': '', 'short': 'r', - 'dest':'value'}), - - ('opt-b-1', {'type': 'string', 'metavar': '', 'group': 'bgroup'}), - ('opt-b-2', {'type': 'string', 'metavar': '', 'group': 'bgroup'}), - ] - -class MyConfiguration(Configuration): - """test configuration""" - def get_named(self): - return {'key': 'val'} - -class ConfigurationTC(TestCase): - - def setUp(self): - self.cfg = MyConfiguration(name='test', options=OPTIONS, usage='Just do it ! 
(tm)') - - def test_default(self): - cfg = self.cfg - self.assertEqual(cfg['dothis'], True) - self.assertEqual(cfg['value'], None) - self.assertEqual(cfg['multiple'], ['yop', 'yep']) - self.assertEqual(cfg['number'], 2) - self.assertEqual(cfg['bytes'], 1024) - self.assertIsInstance(cfg['bytes'], integer_types) - self.assertEqual(cfg['choice'], 'yo') - self.assertEqual(cfg['multiple-choice'], ['yo', 'ye']) - self.assertEqual(cfg['named'], {'key': 'val'}) - - def test_base(self): - cfg = self.cfg - cfg.set_option('number', '0') - self.assertEqual(cfg['number'], 0) - self.assertRaises(OptionValueError, cfg.set_option, 'number', 'youpi') - self.assertRaises(OptionValueError, cfg.set_option, 'choice', 'youpi') - self.assertRaises(OptionValueError, cfg.set_option, 'multiple-choice', ('yo', 'y', 'ya')) - cfg.set_option('multiple-choice', 'yo, ya') - self.assertEqual(cfg['multiple-choice'], ['yo', 'ya']) - self.assertEqual(cfg.get('multiple-choice'), ['yo', 'ya']) - self.assertEqual(cfg.get('whatever'), None) - - def test_load_command_line_configuration(self): - cfg = self.cfg - args = cfg.load_command_line_configuration(['--choice', 'ye', '--number', '4', - '--multiple=1,2,3', '--dothis=n', - '--bytes=10KB', - 'other', 'arguments']) - self.assertEqual(args, ['other', 'arguments']) - self.assertEqual(cfg['dothis'], False) - self.assertEqual(cfg['multiple'], ['1', '2', '3']) - self.assertEqual(cfg['number'], 4) - self.assertEqual(cfg['bytes'], 10240) - self.assertEqual(cfg['choice'], 'ye') - self.assertEqual(cfg['value'], None) - args = cfg.load_command_line_configuration(['-v', 'duh']) - self.assertEqual(args, []) - self.assertEqual(cfg['value'], 'duh') - self.assertEqual(cfg['dothis'], False) - self.assertEqual(cfg['multiple'], ['1', '2', '3']) - self.assertEqual(cfg['number'], 4) - self.assertEqual(cfg['bytes'], 10240) - self.assertEqual(cfg['choice'], 'ye') - - def test_load_configuration(self): - cfg = self.cfg - args = cfg.load_configuration(choice='ye', number='4', - multiple='1,2,3', dothis='n', - multiple_choice=('yo', 'ya')) - self.assertEqual(cfg['dothis'], False) - self.assertEqual(cfg['multiple'], ['1', '2', '3']) - self.assertEqual(cfg['number'], 4) - self.assertEqual(cfg['choice'], 'ye') - self.assertEqual(cfg['value'], None) - self.assertEqual(cfg['multiple-choice'], ('yo', 'ya')) - - def test_load_configuration_file_case_insensitive(self): - file = tempfile.mktemp() - stream = open(file, 'w') - try: - stream.write("""[Test] - -dothis=no - -#value= - -# you can also document the option -multiple=yop,yepii - -# boom -number=3 - -bytes=1KB - -choice=yo - -multiple-choice=yo,ye - -named=key:val - - -[agroup] - -diffgroup=zou -""") - stream.close() - self.cfg.load_file_configuration(file) - self.assertEqual(self.cfg['dothis'], False) - self.assertEqual(self.cfg['value'], None) - self.assertEqual(self.cfg['multiple'], ['yop', 'yepii']) - self.assertEqual(self.cfg['diffgroup'], 'zou') - finally: - os.remove(file) - - def test_option_order(self): - """ Check that options are taken into account in the command line order - and not in the order they are defined in the Configuration object. 
- """ - file = tempfile.mktemp() - stream = open(file, 'w') - try: - stream.write("""[Test] -reset-value=toto -value=tata -""") - stream.close() - self.cfg.load_file_configuration(file) - finally: - os.remove(file) - self.assertEqual(self.cfg['value'], 'tata') - - def test_unsupported_options(self): - file = tempfile.mktemp() - stream = open(file, 'w') - try: - stream.write("""[Test] -whatever=toto -value=tata -""") - stream.close() - self.cfg.load_file_configuration(file) - finally: - os.remove(file) - self.assertEqual(self.cfg['value'], 'tata') - self.assertRaises(OptionError, self.cfg.__getitem__, 'whatever') - - def test_generate_config(self): - stream = StringIO() - self.cfg.generate_config(stream) - self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST] - -dothis=yes - -#value= - -# you can also document the option -multiple=yop,yep - -# boom -number=2 - -bytes=1KB - -choice=yo - -multiple-choice=yo,ye - -named=key:val - -#reset-value= - - -[AGROUP] - -diffgroup=pouet - - -[BGROUP] - -#opt-b-1= - -#opt-b-2=""") - - def test_generate_config_with_space_string(self): - self.cfg['value'] = ' ' - stream = StringIO() - self.cfg.generate_config(stream) - self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST] - -dothis=yes - -value=' ' - -# you can also document the option -multiple=yop,yep - -# boom -number=2 - -bytes=1KB - -choice=yo - -multiple-choice=yo,ye - -named=key:val - -reset-value=' ' - - -[AGROUP] - -diffgroup=pouet - - -[BGROUP] - -#opt-b-1= - -#opt-b-2=""") - - def test_generate_config_with_multiline_string(self): - self.cfg['value'] = 'line1\nline2\nline3' - stream = StringIO() - self.cfg.generate_config(stream) - self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST] - -dothis=yes - -value= - line1 - line2 - line3 - -# you can also document the option -multiple=yop,yep - -# boom -number=2 - -bytes=1KB - -choice=yo - -multiple-choice=yo,ye - -named=key:val - -reset-value= - line1 - line2 - line3 - - -[AGROUP] - -diffgroup=pouet - - -[BGROUP] - -#opt-b-1= - -#opt-b-2=""") - - - def test_roundtrip(self): - cfg = self.cfg - f = tempfile.mktemp() - stream = open(f, 'w') - try: - self.cfg['dothis'] = False - self.cfg['multiple'] = ["toto", "tata"] - self.cfg['number'] = 3 - self.cfg['bytes'] = 2048 - cfg.generate_config(stream) - stream.close() - new_cfg = MyConfiguration(name='test', options=OPTIONS) - new_cfg.load_file_configuration(f) - self.assertEqual(cfg['dothis'], new_cfg['dothis']) - self.assertEqual(cfg['multiple'], new_cfg['multiple']) - self.assertEqual(cfg['number'], new_cfg['number']) - self.assertEqual(cfg['bytes'], new_cfg['bytes']) - self.assertEqual(cfg['choice'], new_cfg['choice']) - self.assertEqual(cfg['value'], new_cfg['value']) - self.assertEqual(cfg['multiple-choice'], new_cfg['multiple-choice']) - finally: - os.remove(f) - - def test_setitem(self): - self.assertRaises(OptionValueError, - self.cfg.__setitem__, 'multiple-choice', ('a', 'b')) - self.cfg['multiple-choice'] = ('yi', 'ya') - self.assertEqual(self.cfg['multiple-choice'], ('yi', 'ya')) - - def test_help(self): - self.cfg.add_help_section('bonus', 'a nice additional help') - help = self.cfg.help().strip() - # at least in python 2.4.2 the output is: - # ' -v , --value=' - # it is not unlikely some optik/optparse versions do print -v - # so accept both - help = help.replace(' -v , ', ' -v, ') - help = re.sub('[ ]*(\r?\n)', '\\1', help) - USAGE = """Usage: Just do it ! 
(tm) - -Options: - -h, --help show this help message and exit - --dothis= - -v, --value= - --multiple= - you can also document the option [current: yop,yep] - --number= boom [current: 2] - --bytes= - --choice= - --multiple-choice= - --named= - -r , --reset-value= - - Agroup: - --diffgroup= - - Bgroup: - --opt-b-1= - --opt-b-2= - - Bonus: - a nice additional help""" - if version_info < (2, 5): - # 'usage' header is not capitalized in this version - USAGE = USAGE.replace('Usage: ', 'usage: ') - elif version_info < (2, 4): - USAGE = """usage: Just do it ! (tm) - -options: - -h, --help show this help message and exit - --dothis= - -v, --value= - --multiple= - you can also document the option - --number= - --choice= - --multiple-choice= - --named= - - Bonus: - a nice additional help -""" - self.assertMultiLineEqual(help, USAGE) - - - def test_manpage(self): - pkginfo = {} - with open(join(DATA, '__pkginfo__.py')) as fobj: - exec(fobj.read(), pkginfo) - self.cfg.generate_manpage(attrdict(pkginfo), stream=StringIO()) - - def test_rewrite_config(self): - changes = [('renamed', 'renamed', 'choice'), - ('moved', 'named', 'old', 'test'), - ] - read_old_config(self.cfg, changes, join(DATA, 'test.ini')) - stream = StringIO() - self.cfg.generate_config(stream) - self.assertMultiLineEqual(stream.getvalue().strip(), """[TEST] - -dothis=yes - -value=' ' - -# you can also document the option -multiple=yop - -# boom -number=2 - -bytes=1KB - -choice=yo - -multiple-choice=yo,ye - -named=key:val - -reset-value=' ' - - -[AGROUP] - -diffgroup=pouet - - -[BGROUP] - -#opt-b-1= - -#opt-b-2=""") - -class Linter(OptionsManagerMixIn, OptionsProviderMixIn): - options = ( - ('profile', {'type' : 'yn', 'metavar' : '', - 'default': False, - 'help' : 'Profiled execution.'}), - ) - def __init__(self): - OptionsManagerMixIn.__init__(self, usage="") - OptionsProviderMixIn.__init__(self) - self.register_options_provider(self) - self.load_provider_defaults() - -class RegrTC(TestCase): - - def setUp(self): - self.linter = Linter() - - def test_load_defaults(self): - self.linter.load_command_line_configuration([]) - self.assertEqual(self.linter.config.profile, False) - - def test_register_options_multiple_groups(self): - """ensure multiple option groups can be registered at once""" - config = Configuration() - self.assertEqual(config.options, ()) - new_options = ( - ('option1', {'type': 'string', 'help': '', - 'group': 'g1', 'level': 2}), - ('option2', {'type': 'string', 'help': '', - 'group': 'g1', 'level': 2}), - ('option3', {'type': 'string', 'help': '', - 'group': 'g2', 'level': 2}), - ) - config.register_options(new_options) - self.assertEqual(config.options, new_options) - - -class MergeTC(TestCase): - - def test_merge1(self): - merged = merge_options([('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}), - ('dothis', {'type':'yn', 'action': 'store', 'default': False, 'metavar': ''}), - ]) - self.assertEqual(len(merged), 1) - self.assertEqual(merged[0][0], 'dothis') - self.assertEqual(merged[0][1]['default'], True) - - def test_merge2(self): - merged = merge_options([('dothis', {'type':'yn', 'action': 'store', 'default': True, 'metavar': ''}), - ('value', {'type': 'string', 'metavar': '', 'short': 'v'}), - ('dothis', {'type':'yn', 'action': 'store', 'default': False, 'metavar': ''}), - ]) - self.assertEqual(len(merged), 2) - self.assertEqual(merged[0][0], 'value') - self.assertEqual(merged[1][0], 'dothis') - self.assertEqual(merged[1][1]['default'], True) - -if __name__ == '__main__': - 
unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_date.py b/pymode/libs/logilab-common-1.4.1/test/unittest_date.py deleted file mode 100644 index 9ae444bb..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_date.py +++ /dev/null @@ -1,206 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" -Unittests for date helpers -""" -from logilab.common.testlib import TestCase, unittest_main, tag - -from logilab.common.date import (date_range, endOfMonth, add_days_worked, - nb_open_days, get_national_holidays, ustrftime, ticks2datetime, - utcdatetime, datetime2ticks) - -from datetime import date, datetime, timedelta -from calendar import timegm -import pytz - -try: - from mx.DateTime import Date as mxDate, DateTime as mxDateTime, \ - now as mxNow, RelativeDateTime, RelativeDate -except ImportError: - mxDate = mxDateTime = RelativeDateTime = mxNow = None - -class DateTC(TestCase): - datecls = date - datetimecls = datetime - timedeltacls = timedelta - now = datetime.now - - def test_day(self): - """enumerate days""" - r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 4))) - expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 2), self.datecls(2000, 1, 3)] - self.assertListEqual(r, expected) - r = list(date_range(self.datecls(2000, 1, 31), self.datecls(2000, 2, 3))) - expected = [self.datecls(2000, 1, 31), self.datecls(2000, 2, 1), self.datecls(2000, 2, 2)] - self.assertListEqual(r, expected) - r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 6), 2)) - expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 3), self.datecls(2000, 1, 5)] - self.assertListEqual(r, expected) - - def test_add_days_worked(self): - add = add_days_worked - # normal - self.assertEqual(add(self.datecls(2008, 1, 3), 1), self.datecls(2008, 1, 4)) - # skip week-end - self.assertEqual(add(self.datecls(2008, 1, 3), 2), self.datecls(2008, 1, 7)) - # skip 2 week-ends - self.assertEqual(add(self.datecls(2008, 1, 3), 8), self.datecls(2008, 1, 15)) - # skip holiday + week-end - self.assertEqual(add(self.datecls(2008, 4, 30), 2), self.datecls(2008, 5, 5)) - - def test_get_national_holidays(self): - holidays = get_national_holidays - yield self.assertEqual, holidays(self.datecls(2008, 4, 29), self.datecls(2008, 5, 2)), \ - [self.datecls(2008, 5, 1)] - yield self.assertEqual, holidays(self.datecls(2008, 5, 7), self.datecls(2008, 5, 8)), [] - x = self.datetimecls(2008, 5, 7, 12, 12, 12) - yield self.assertEqual, holidays(x, x + self.timedeltacls(days=1)), [] - - def test_open_days_now_and_before(self): - nb = nb_open_days - x = self.now() - y = x - self.timedeltacls(seconds=1) - self.assertRaises(AssertionError, nb, x, y) - - def assertOpenDays(self, start, stop, expected): - got = 
nb_open_days(start, stop) - self.assertEqual(got, expected) - - def test_open_days_tuesday_friday(self): - self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 7), 3) - - def test_open_days_day_nextday(self): - self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 5), 1) - - def test_open_days_friday_monday(self): - self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 10), 1) - - def test_open_days_friday_monday_with_two_weekends(self): - self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 17), 6) - - def test_open_days_tuesday_wednesday(self): - """week-end + easter monday""" - self.assertOpenDays(self.datecls(2008, 3, 18), self.datecls(2008, 3, 26), 5) - - def test_open_days_friday_saturday(self): - self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 8), 1) - - def test_open_days_friday_sunday(self): - self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 9), 1) - - def test_open_days_saturday_sunday(self): - self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 9), 0) - - def test_open_days_saturday_monday(self): - self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 10), 0) - - def test_open_days_saturday_tuesday(self): - self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 11), 1) - - def test_open_days_now_now(self): - x = self.now() - self.assertOpenDays(x, x, 0) - - def test_open_days_now_now2(self): - x = self.datetimecls(2010, 5, 24) - self.assertOpenDays(x, x, 0) - - def test_open_days_afternoon_before_holiday(self): - self.assertOpenDays(self.datetimecls(2008, 5, 7, 14), self.datetimecls(2008, 5, 8, 0), 1) - - def test_open_days_afternoon_before_saturday(self): - self.assertOpenDays(self.datetimecls(2008, 5, 9, 14), self.datetimecls(2008, 5, 10, 14), 1) - - def test_open_days_afternoon(self): - self.assertOpenDays(self.datetimecls(2008, 5, 6, 14), self.datetimecls(2008, 5, 7, 14), 1) - - @tag('posix', '1900') - def test_ustrftime_before_1900(self): - date = self.datetimecls(1328, 3, 12, 6, 30) - self.assertEqual(ustrftime(date, '%Y-%m-%d %H:%M:%S'), u'1328-03-12 06:30:00') - - @tag('posix', '1900') - def test_ticks2datetime_before_1900(self): - ticks = -2209075200000 - date = ticks2datetime(ticks) - self.assertEqual(ustrftime(date, '%Y-%m-%d'), u'1899-12-31') - - def test_month(self): - """enumerate months""" - r = list(date_range(self.datecls(2006, 5, 6), self.datecls(2006, 8, 27), - incmonth=True)) - expected = [self.datecls(2006, 5, 6), self.datecls(2006, 6, 1), self.datecls(2006, 7, 1), self.datecls(2006, 8, 1)] - self.assertListEqual(expected, r) - - def test_utcdatetime(self): - if self.datetimecls is mxDateTime: - return - d = self.datetimecls(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc) - d = utcdatetime(d) - self.assertEqual(d, self.datetimecls(2014, 11, 26, 12, 0, 0, 57)) - self.assertIsNone(d.tzinfo) - - d = pytz.timezone('Europe/Paris').localize( - self.datetimecls(2014, 11, 26, 12, 0, 0, 57)) - d = utcdatetime(d) - self.assertEqual(d, self.datetimecls(2014, 11, 26, 11, 0, 0, 57)) - self.assertIsNone(d.tzinfo) - - d = pytz.timezone('Europe/Paris').localize( - self.datetimecls(2014, 7, 26, 12, 0, 0, 57)) - d = utcdatetime(d) - self.assertEqual(d, self.datetimecls(2014, 7, 26, 10, 0, 0, 57)) - self.assertIsNone(d.tzinfo) - - def test_datetime2ticks(self): - d = datetime(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc) - timestamp = timegm(d.timetuple()) - self.assertEqual(datetime2ticks(d), timestamp * 1000) - d = 
d.replace(microsecond=123456) - self.assertEqual(datetime2ticks(d), timestamp * 1000 + 123) - - def test_datetime2ticks_date_argument(self): - d = date(2014, 11, 26) - timestamp = timegm(d.timetuple()) - self.assertEqual(datetime2ticks(d), timestamp * 1000) - - -class MxDateTC(DateTC): - datecls = mxDate - datetimecls = mxDateTime - timedeltacls = RelativeDateTime - now = mxNow - - def check_mx(self): - if mxDate is None: - self.skipTest('mx.DateTime is not installed') - - def setUp(self): - self.check_mx() - - def test_month(self): - """enumerate months""" - r = list(date_range(self.datecls(2000, 1, 2), self.datecls(2000, 4, 4), endOfMonth)) - expected = [self.datecls(2000, 1, 2), self.datecls(2000, 2, 29), self.datecls(2000, 3, 31)] - self.assertListEqual(r, expected) - r = list(date_range(self.datecls(2000, 11, 30), self.datecls(2001, 2, 3), endOfMonth)) - expected = [self.datecls(2000, 11, 30), self.datecls(2000, 12, 31), self.datecls(2001, 1, 31)] - self.assertListEqual(r, expected) - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py b/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py deleted file mode 100644 index e97a56f2..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_decorators.py +++ /dev/null @@ -1,208 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""unit tests for the decorators module -""" -import sys -import types - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.decorators import (monkeypatch, cached, clear_cache, - copy_cache, cachedproperty) - -class DecoratorsTC(TestCase): - - def test_monkeypatch_instance_method(self): - class MyClass: pass - @monkeypatch(MyClass) - def meth1(self): - return 12 - class XXX(object): - @monkeypatch(MyClass) - def meth2(self): - return 12 - if sys.version_info < (3, 0): - self.assertIsInstance(MyClass.meth1, types.MethodType) - self.assertIsInstance(MyClass.meth2, types.MethodType) - else: - # with python3, unbound method are functions - self.assertIsInstance(MyClass.meth1, types.FunctionType) - self.assertIsInstance(MyClass.meth2, types.FunctionType) - self.assertEqual(MyClass().meth1(), 12) - self.assertEqual(MyClass().meth2(), 12) - - def test_monkeypatch_property(self): - class MyClass: pass - @monkeypatch(MyClass, methodname='prop1') - @property - def meth1(self): - return 12 - self.assertIsInstance(MyClass.prop1, property) - self.assertEqual(MyClass().prop1, 12) - - def test_monkeypatch_arbitrary_callable(self): - class MyClass: pass - class ArbitraryCallable(object): - def __call__(self): - return 12 - # ensure it complains about missing __name__ - with self.assertRaises(AttributeError) as cm: - monkeypatch(MyClass)(ArbitraryCallable()) - self.assertTrue(str(cm.exception).endswith('has no __name__ attribute: you should provide an explicit `methodname`')) - # ensure no black magic under the hood - monkeypatch(MyClass, 'foo')(ArbitraryCallable()) - self.assertTrue(callable(MyClass.foo)) - self.assertEqual(MyClass().foo(), 12) - - def test_monkeypatch_with_same_name(self): - class MyClass: pass - @monkeypatch(MyClass) - def meth1(self): - return 12 - self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'], - ['meth1']) - inst = MyClass() - self.assertEqual(inst.meth1(), 12) - - def test_monkeypatch_with_custom_name(self): - class MyClass: pass - @monkeypatch(MyClass, 'foo') - def meth2(self, param): - return param + 12 - self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'], - ['foo']) - inst = MyClass() - self.assertEqual(inst.foo(4), 16) - - def test_cannot_cache_generator(self): - def foo(): - yield 42 - self.assertRaises(AssertionError, cached, foo) - - def test_cached_preserves_docstrings_and_name(self): - class Foo(object): - @cached - def foo(self): - """ what's up doc ? """ - def bar(self, zogzog): - """ what's up doc ? """ - bar = cached(bar, 1) - @cached - def quux(self, zogzog): - """ what's up doc ? """ - self.assertEqual(Foo.foo.__doc__, """ what's up doc ? """) - self.assertEqual(Foo.foo.__name__, 'foo') - self.assertEqual(Foo.bar.__doc__, """ what's up doc ? """) - self.assertEqual(Foo.bar.__name__, 'bar') - self.assertEqual(Foo.quux.__doc__, """ what's up doc ? """) - self.assertEqual(Foo.quux.__name__, 'quux') - - def test_cached_single_cache(self): - class Foo(object): - @cached(cacheattr=u'_foo') - def foo(self): - """ what's up doc ? """ - foo = Foo() - foo.foo() - self.assertTrue(hasattr(foo, '_foo')) - clear_cache(foo, 'foo') - self.assertFalse(hasattr(foo, '_foo')) - - def test_cached_multi_cache(self): - class Foo(object): - @cached(cacheattr=u'_foo') - def foo(self, args): - """ what's up doc ? 
""" - foo = Foo() - foo.foo(1) - self.assertEqual(foo._foo, {(1,): None}) - clear_cache(foo, 'foo') - self.assertFalse(hasattr(foo, '_foo')) - - def test_cached_keyarg_cache(self): - class Foo(object): - @cached(cacheattr=u'_foo', keyarg=1) - def foo(self, other, args): - """ what's up doc ? """ - foo = Foo() - foo.foo(2, 1) - self.assertEqual(foo._foo, {2: None}) - clear_cache(foo, 'foo') - self.assertFalse(hasattr(foo, '_foo')) - - def test_cached_property(self): - class Foo(object): - @property - @cached(cacheattr=u'_foo') - def foo(self): - """ what's up doc ? """ - foo = Foo() - foo.foo - self.assertEqual(foo._foo, None) - clear_cache(foo, 'foo') - self.assertFalse(hasattr(foo, '_foo')) - - def test_copy_cache(self): - class Foo(object): - @cached(cacheattr=u'_foo') - def foo(self, args): - """ what's up doc ? """ - foo = Foo() - foo.foo(1) - self.assertEqual(foo._foo, {(1,): None}) - foo2 = Foo() - self.assertFalse(hasattr(foo2, '_foo')) - copy_cache(foo2, 'foo', foo) - self.assertEqual(foo2._foo, {(1,): None}) - - - def test_cachedproperty(self): - class Foo(object): - x = 0 - @cachedproperty - def bar(self): - self.__class__.x += 1 - return self.__class__.x - @cachedproperty - def quux(self): - """ some prop """ - return 42 - - foo = Foo() - self.assertEqual(Foo.x, 0) - self.assertFalse('bar' in foo.__dict__) - self.assertEqual(foo.bar, 1) - self.assertTrue('bar' in foo.__dict__) - self.assertEqual(foo.bar, 1) - self.assertEqual(foo.quux, 42) - self.assertEqual(Foo.bar.__doc__, - '') - self.assertEqual(Foo.quux.__doc__, - '\n some prop ') - - foo2 = Foo() - self.assertEqual(foo2.bar, 2) - # make sure foo.foo is cached - self.assertEqual(foo.bar, 1) - - class Kallable(object): - def __call__(self): - return 42 - self.assertRaises(TypeError, cachedproperty, Kallable()) - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py b/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py deleted file mode 100644 index b0f8a1aa..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_deprecation.py +++ /dev/null @@ -1,147 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""unit tests for logilab.common.deprecation""" - -import warnings - -from six import add_metaclass - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common import deprecation - - -class RawInputTC(TestCase): - - # XXX with 2.6 we could test warnings - # http://docs.python.org/library/warnings.html#testing-warnings - # instead we just make sure it does not crash - - def mock_warn(self, *args, **kwargs): - self.messages.append(args[0]) - - def setUp(self): - self.messages = [] - deprecation.warn = self.mock_warn - - def tearDown(self): - deprecation.warn = warnings.warn - - def mk_func(self): - def any_func(): - pass - return any_func - - def test_class_deprecated(self): - @add_metaclass(deprecation.class_deprecated) - class AnyClass(object): - pass - AnyClass() - self.assertEqual(self.messages, - ['AnyClass is deprecated']) - - def test_deprecated_func(self): - any_func = deprecation.deprecated()(self.mk_func()) - any_func() - any_func = deprecation.deprecated('message')(self.mk_func()) - any_func() - self.assertEqual(self.messages, - ['The function "any_func" is deprecated', 'message']) - - def test_deprecated_decorator(self): - @deprecation.deprecated() - def any_func(): - pass - any_func() - @deprecation.deprecated('message') - def any_func(): - pass - any_func() - self.assertEqual(self.messages, - ['The function "any_func" is deprecated', 'message']) - - def test_moved(self): - module = 'data.deprecation' - any_func = deprecation.moved(module, 'moving_target') - any_func() - self.assertEqual(self.messages, - ['object moving_target has been moved to module data.deprecation']) - - def test_deprecated_manager(self): - deprecator = deprecation.DeprecationManager("module_name") - deprecator.compatibility('1.3') - # This warn should be printed. - deprecator.warn('1.1', "Major deprecation message.", 1) - deprecator.warn('1.1') - - @deprecator.deprecated('1.2', 'Major deprecation message.') - def any_func(): - pass - any_func() - - @deprecator.deprecated('1.2') - def other_func(): - pass - other_func() - - self.assertListEqual(self.messages, - ['[module_name 1.1] Major deprecation message.', - '[module_name 1.1] ', - '[module_name 1.2] Major deprecation message.', - '[module_name 1.2] The function "other_func" is deprecated']) - - def test_class_deprecated_manager(self): - deprecator = deprecation.DeprecationManager("module_name") - deprecator.compatibility('1.3') - @add_metaclass(deprecator.class_deprecated('1.2')) - class AnyClass(object): - pass - AnyClass() - self.assertEqual(self.messages, - ['[module_name 1.2] AnyClass is deprecated']) - - - def test_deprecated_manager_noprint(self): - deprecator = deprecation.DeprecationManager("module_name") - deprecator.compatibility('1.3') - # This warn should not be printed. 
- deprecator.warn('1.3', "Minor deprecation message.", 1) - - @deprecator.deprecated('1.3', 'Minor deprecation message.') - def any_func(): - pass - any_func() - - @deprecator.deprecated('1.20') - def other_func(): - pass - other_func() - - @deprecator.deprecated('1.4') - def other_func(): - pass - other_func() - - class AnyClass(object): - __metaclass__ = deprecator.class_deprecated((1,5)) - AnyClass() - - self.assertFalse(self.messages) - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py deleted file mode 100644 index 555e73f4..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_fileutils.py +++ /dev/null @@ -1,146 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""unit tests for logilab.common.fileutils""" - -import doctest -import io -import sys, os, tempfile, shutil -from stat import S_IWRITE -from os.path import join - -from logilab.common.testlib import TestCase, unittest_main, unittest - -from logilab.common.fileutils import * - -DATA_DIR = join(os.path.abspath(os.path.dirname(__file__)), 'data') -NEWLINES_TXT = join(DATA_DIR, 'newlines.txt') - - -class FirstleveldirectoryTC(TestCase): - - def test_known_values_first_level_directory(self): - """return the first level directory of a path""" - self.assertEqual(first_level_directory('truc/bidule/chouette'), 'truc', None) - self.assertEqual(first_level_directory('/truc/bidule/chouette'), '/', None) - -class IsBinaryTC(TestCase): - def test(self): - self.assertEqual(is_binary('toto.txt'), 0) - #self.assertEqual(is_binary('toto.xml'), 0) - self.assertEqual(is_binary('toto.bin'), 1) - self.assertEqual(is_binary('toto.sxi'), 1) - self.assertEqual(is_binary('toto.whatever'), 1) - -class GetModeTC(TestCase): - def test(self): - self.assertEqual(write_open_mode('toto.txt'), 'w') - #self.assertEqual(write_open_mode('toto.xml'), 'w') - self.assertEqual(write_open_mode('toto.bin'), 'wb') - self.assertEqual(write_open_mode('toto.sxi'), 'wb') - -class NormReadTC(TestCase): - def test_known_values_norm_read(self): - with io.open(NEWLINES_TXT) as f: - data = f.read() - self.assertEqual(data.strip(), '\n'.join(['# mixed new lines', '1', '2', '3'])) - - -class LinesTC(TestCase): - def test_known_values_lines(self): - self.assertEqual(lines(NEWLINES_TXT), - ['# mixed new lines', '1', '2', '3']) - - def test_known_values_lines_comment(self): - self.assertEqual(lines(NEWLINES_TXT, comments='#'), - ['1', '2', '3']) - -class ExportTC(TestCase): - def setUp(self): - self.tempdir = tempfile.mktemp() - os.mkdir(self.tempdir) - - def test(self): - export(DATA_DIR, self.tempdir, verbose=0) - self.assertTrue(exists(join(self.tempdir, '__init__.py'))) 
- self.assertTrue(exists(join(self.tempdir, 'sub'))) - self.assertTrue(not exists(join(self.tempdir, '__init__.pyc'))) - self.assertTrue(not exists(join(self.tempdir, 'CVS'))) - - def tearDown(self): - shutil.rmtree(self.tempdir) - -class ProtectedFileTC(TestCase): - def setUp(self): - self.rpath = join(DATA_DIR, 'write_protected_file.txt') - self.rwpath = join(DATA_DIR, 'normal_file.txt') - # Make sure rpath is not writable ! - os.chmod(self.rpath, 33060) - # Make sure rwpath is writable ! - os.chmod(self.rwpath, 33188) - - def test_mode_change(self): - """tests that mode is changed when needed""" - # test on non-writable file - #self.assertTrue(not os.access(self.rpath, os.W_OK)) - self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE) - wp_file = ProtectedFile(self.rpath, 'w') - self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE) - self.assertTrue(os.access(self.rpath, os.W_OK)) - # test on writable-file - self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE) - self.assertTrue(os.access(self.rwpath, os.W_OK)) - wp_file = ProtectedFile(self.rwpath, 'w') - self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE) - self.assertTrue(os.access(self.rwpath, os.W_OK)) - - def test_restore_on_close(self): - """tests original mode is restored on close""" - # test on non-writable file - #self.assertTrue(not os.access(self.rpath, os.W_OK)) - self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE) - ProtectedFile(self.rpath, 'w').close() - #self.assertTrue(not os.access(self.rpath, os.W_OK)) - self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE) - # test on writable-file - self.assertTrue(os.access(self.rwpath, os.W_OK)) - self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE) - ProtectedFile(self.rwpath, 'w').close() - self.assertTrue(os.access(self.rwpath, os.W_OK)) - self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE) - - def test_mode_change_on_append(self): - """tests that mode is changed when file is opened in 'a' mode""" - #self.assertTrue(not os.access(self.rpath, os.W_OK)) - self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE) - wp_file = ProtectedFile(self.rpath, 'a') - self.assertTrue(os.access(self.rpath, os.W_OK)) - self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE) - wp_file.close() - #self.assertTrue(not os.access(self.rpath, os.W_OK)) - self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE) - - -if sys.version_info < (3, 0): - def load_tests(loader, tests, ignore): - from logilab.common import fileutils - tests.addTests(doctest.DocTestSuite(fileutils)) - return tests - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py b/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py deleted file mode 100644 index 9a2e8bc9..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_graph.py +++ /dev/null @@ -1,89 +0,0 @@ -# unit tests for the cache module -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.graph import get_cycles, has_path, ordered_nodes, UnorderableGraph - -class getCyclesTC(TestCase): - - def test_known0(self): - self.assertEqual(get_cycles({1:[2], 2:[3], 3:[1]}), [[1, 2, 3]]) - - def test_known1(self): - self.assertEqual(get_cycles({1:[2], 2:[3], 3:[1, 4], 4:[3]}), [[1, 2, 3], [3, 4]]) - - def test_known2(self): - self.assertEqual(get_cycles({1:[2], 2:[3], 3:[0], 0:[]}), []) - - -class hasPathTC(TestCase): - - def test_direct_connection(self): - self.assertEqual(has_path({'A': ['B'], 'B': ['A']}, 'A', 'B'), ['B']) - - def test_indirect_connection(self): - self.assertEqual(has_path({'A': ['B'], 'B': ['A', 'C'], 'C': ['B']}, 'A', 'C'), ['B', 'C']) - - def test_no_connection(self): - self.assertEqual(has_path({'A': ['B'], 'B': ['A']}, 'A', 'C'), None) - - def test_cycle(self): - self.assertEqual(has_path({'A': ['A']}, 'A', 'B'), None) - -class ordered_nodesTC(TestCase): - - def test_one_item(self): - graph = {'a':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a',)) - - def test_single_dependency(self): - graph = {'a':['b'], 'b':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a','b')) - graph = {'a':[], 'b':['a']} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('b','a')) - - def test_two_items_no_dependency(self): - graph = {'a':[], 'b':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a','b')) - - def test_three_items_no_dependency(self): - graph = {'a':[], 'b':[], 'c':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a', 'b', 'c')) - - def test_three_items_one_dependency(self): - graph = {'a': ['c'], 'b': [], 'c':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a', 'b', 'c')) - - def test_three_items_two_dependencies(self): - graph = {'a': ['b'], 'b': ['c'], 'c':[]} - ordered = ordered_nodes(graph) - self.assertEqual(ordered, ('a', 'b', 'c')) - - def test_bad_graph(self): - graph = {'a':['b']} - self.assertRaises(UnorderableGraph, ordered_nodes, graph) - -if __name__ == "__main__": - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py b/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py deleted file mode 100644 index 1dbed7a1..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_interface.py +++ /dev/null @@ -1,87 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.interface import * - -class IFace1(Interface): pass -class IFace2(Interface): pass -class IFace3(Interface): pass - - -class A(object): - __implements__ = (IFace1,) - - -class B(A): pass - - -class C1(B): - __implements__ = list(B.__implements__) + [IFace3] - -class C2(B): - __implements__ = B.__implements__ + (IFace2,) - -class D(C1): - __implements__ = () - -class Z(object): pass - -class ExtendTC(TestCase): - - def setUp(self): - global aimpl, c1impl, c2impl, dimpl - aimpl = A.__implements__ - c1impl = C1.__implements__ - c2impl = C2.__implements__ - dimpl = D.__implements__ - - def test_base(self): - extend(A, IFace2) - self.assertEqual(A.__implements__, (IFace1, IFace2)) - self.assertEqual(B.__implements__, (IFace1, IFace2)) - self.assertTrue(B.__implements__ is A.__implements__) - self.assertEqual(C1.__implements__, [IFace1, IFace3, IFace2]) - self.assertEqual(C2.__implements__, (IFace1, IFace2)) - self.assertTrue(C2.__implements__ is c2impl) - self.assertEqual(D.__implements__, (IFace2,)) - - def test_already_impl(self): - extend(A, IFace1) - self.assertTrue(A.__implements__ is aimpl) - - def test_no_impl(self): - extend(Z, IFace1) - self.assertEqual(Z.__implements__, (IFace1,)) - - def test_notimpl_explicit(self): - extend(C1, IFace3) - self.assertTrue(C1.__implements__ is c1impl) - self.assertTrue(D.__implements__ is dimpl) - - - def test_nonregr_implements_baseinterface(self): - class SubIFace(IFace1): pass - class X(object): - __implements__ = (SubIFace,) - - self.assertTrue(SubIFace.is_implemented_by(X)) - self.assertTrue(IFace1.is_implemented_by(X)) - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py deleted file mode 100644 index ec2a5c82..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_modutils.py +++ /dev/null @@ -1,296 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-""" -unit tests for module modutils (module manipulation utilities) -""" - -import doctest -import sys -import warnings -try: - __file__ -except NameError: - __file__ = sys.argv[0] - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common import modutils - -from os import path, getcwd, sep -from logilab import common -from logilab.common import tree - -sys.path.insert(0, path.dirname(__file__)) -DATADIR = path.join(path.dirname(__file__), 'data') - - -class ModutilsTestCase(TestCase): - def setUp(self): - super(ModutilsTestCase, self).setUp() - self.__common_in_path = common.__path__[0] in sys.path - if self.__common_in_path: - sys.path.remove(common.__path__[0]) - - def tearDown(self): - if self.__common_in_path: - sys.path.insert(0, common.__path__[0]) - super(ModutilsTestCase, self).tearDown() - - -class ModuleFileTC(ModutilsTestCase): - package = "mypypa" - - def setUp(self): - super(ModuleFileTC, self).setUp() - for k in list(sys.path_importer_cache.keys()): - if 'MyPyPa' in k: - del sys.path_importer_cache[k] - - def test_find_zipped_module(self): - mtype, mfile = modutils._module_file([self.package], [path.join(DATADIR, 'MyPyPa-0.1.0.zip')]) - self.assertEqual(mtype, modutils.ZIPFILE) - self.assertEqual(mfile.split(sep)[-4:], ["test", "data", "MyPyPa-0.1.0.zip", self.package]) - - def test_find_egg_module(self): - mtype, mfile = modutils._module_file([self.package], [path.join(DATADIR, 'MyPyPa-0.1.0-py2.5.egg')]) - self.assertEqual(mtype, modutils.ZIPFILE) - self.assertEqual(mfile.split(sep)[-4:], ["test", "data", "MyPyPa-0.1.0-py2.5.egg", self.package]) - - -class load_module_from_name_tc(ModutilsTestCase): - """ load a python module from it's name """ - - def test_knownValues_load_module_from_name_1(self): - self.assertEqual(modutils.load_module_from_name('sys'), sys) - - def test_knownValues_load_module_from_name_2(self): - self.assertEqual(modutils.load_module_from_name('os.path'), path) - - def test_raise_load_module_from_name_1(self): - self.assertRaises(ImportError, - modutils.load_module_from_name, 'os.path', use_sys=0) - - -class get_module_part_tc(ModutilsTestCase): - """given a dotted name return the module part of the name""" - - def test_knownValues_get_module_part_1(self): - self.assertEqual(modutils.get_module_part('logilab.common.modutils'), - 'logilab.common.modutils') - - def test_knownValues_get_module_part_2(self): - self.assertEqual(modutils.get_module_part('logilab.common.modutils.get_module_part'), - 'logilab.common.modutils') - - def test_knownValues_get_module_part_3(self): - """relative import from given file""" - self.assertEqual(modutils.get_module_part('interface.Interface', - modutils.__file__), 'interface') - - def test_knownValues_get_compiled_module_part(self): - self.assertEqual(modutils.get_module_part('math.log10'), 'math') - self.assertEqual(modutils.get_module_part('math.log10', __file__), 'math') - - def test_knownValues_get_builtin_module_part(self): - self.assertEqual(modutils.get_module_part('sys.path'), 'sys') - self.assertEqual(modutils.get_module_part('sys.path', '__file__'), 'sys') - - def test_get_module_part_exception(self): - self.assertRaises(ImportError, modutils.get_module_part, 'unknown.module', - modutils.__file__) - - -class modpath_from_file_tc(ModutilsTestCase): - """ given an absolute file path return the python module's path as a list """ - - def test_knownValues_modpath_from_file_1(self): - with warnings.catch_warnings(record=True) as warns: - 
self.assertEqual(modutils.modpath_from_file(modutils.__file__), - ['logilab', 'common', 'modutils']) - self.assertIn('you should avoid using modpath_from_file()', - [str(w.message) for w in warns]) - - def test_knownValues_modpath_from_file_2(self): - self.assertEqual(modutils.modpath_from_file('unittest_modutils.py', - {getcwd(): 'arbitrary.pkg'}), - ['arbitrary', 'pkg', 'unittest_modutils']) - - def test_raise_modpath_from_file_Exception(self): - self.assertRaises(Exception, modutils.modpath_from_file, '/turlututu') - - -class load_module_from_path_tc(ModutilsTestCase): - - def test_do_not_load_twice(self): - sys.path.insert(0, self.datadir) - foo = modutils.load_module_from_modpath(['lmfp', 'foo']) - lmfp = modutils.load_module_from_modpath(['lmfp']) - self.assertEqual(len(sys.just_once), 1) - sys.path.pop(0) - del sys.just_once - -class file_from_modpath_tc(ModutilsTestCase): - """given a mod path (i.e. splited module / package name), return the - corresponding file, giving priority to source file over precompiled file - if it exists""" - - def test_site_packages(self): - from pytz import tzinfo - self.assertEqual(path.realpath(modutils.file_from_modpath(['pytz', 'tzinfo'])), - path.realpath(tzinfo.__file__.replace('.pyc', '.py'))) - - def test_std_lib(self): - from os import path - self.assertEqual(path.realpath(modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py')), - path.realpath(path.__file__.replace('.pyc', '.py'))) - - def test_xmlplus(self): - try: - # don't fail if pyxml isn't installed - from xml.dom import ext - except ImportError: - pass - else: - self.assertEqual(path.realpath(modutils.file_from_modpath(['xml', 'dom', 'ext']).replace('.pyc', '.py')), - path.realpath(ext.__file__.replace('.pyc', '.py'))) - - def test_builtin(self): - self.assertEqual(modutils.file_from_modpath(['sys']), - None) - - - def test_unexisting(self): - self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu']) - - -class get_source_file_tc(ModutilsTestCase): - - def test(self): - from os import path - self.assertEqual(modutils.get_source_file(path.__file__), - path.__file__.replace('.pyc', '.py')) - - def test_raise(self): - self.assertRaises(modutils.NoSourceFile, modutils.get_source_file, 'whatever') - -class is_standard_module_tc(ModutilsTestCase): - """ - return true if the module may be considered as a module from the standard - library - """ - - def test_builtins(self): - if sys.version_info < (3, 0): - self.assertEqual(modutils.is_standard_module('__builtin__'), True) - self.assertEqual(modutils.is_standard_module('builtins'), False) - else: - self.assertEqual(modutils.is_standard_module('__builtin__'), False) - self.assertEqual(modutils.is_standard_module('builtins'), True) - - def test_builtin(self): - self.assertEqual(modutils.is_standard_module('sys'), True) - - def test_nonstandard(self): - self.assertEqual(modutils.is_standard_module('logilab'), False) - - def test_unknown(self): - self.assertEqual(modutils.is_standard_module('unknown'), False) - - def test_4(self): - self.assertEqual(modutils.is_standard_module('marshal'), True) - self.assertEqual(modutils.is_standard_module('pickle'), True) - self.assertEqual(modutils.is_standard_module('email'), True) - self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) - venv_py3 = sys.version_info[0] >= 3 and hasattr(sys, 'real_prefix') - if not venv_py3: - # those modules are symlinked by virtualenv (but not by python's venv) - self.assertEqual(modutils.is_standard_module('hashlib'), 
True) - self.assertEqual(modutils.is_standard_module('io'), True) - - def test_custom_path(self): - self.assertEqual(modutils.is_standard_module('data.module', (DATADIR,)), True) - self.assertEqual(modutils.is_standard_module('data.module', (path.abspath(DATADIR),)), True) - - def test_failing_border_cases(self): - # using a subpackage/submodule path as std_path argument - self.assertEqual(modutils.is_standard_module('logilab.common', common.__path__), False) - # using a module + object name as modname argument - self.assertEqual(modutils.is_standard_module('sys.path'), True) - # this is because only the first package/module is considered - self.assertEqual(modutils.is_standard_module('sys.whatever'), True) - self.assertEqual(modutils.is_standard_module('logilab.whatever', common.__path__), False) - - -class is_relative_tc(ModutilsTestCase): - - - def test_knownValues_is_relative_1(self): - self.assertEqual(modutils.is_relative('modutils', common.__path__[0]), True) - - def test_knownValues_is_relative_2(self): - self.assertEqual(modutils.is_relative('modutils', tree.__file__), True) - - def test_knownValues_is_relative_3(self): - self.assertEqual(modutils.is_relative('logilab.common.modutils', - common.__path__[0]), False) - -class get_modules_tc(ModutilsTestCase): - - def test_knownValues_get_modules_1(self): # XXXFIXME: TOWRITE - """given a directory return a list of all available python modules, even - in subdirectories - """ - import data.find_test as data - mod_path = ("data", 'find_test') - modules = sorted(modutils.get_modules(path.join(*mod_path), - data.__path__[0])) - self.assertSetEqual(set(modules), - set([ '.'.join(mod_path + (mod, )) for mod in ('module', 'module2', - 'noendingnewline', 'nonregr')])) - - -class get_modules_files_tc(ModutilsTestCase): - - def test_knownValues_get_module_files_1(self): # XXXFIXME: TOWRITE - """given a directory return a list of all available python module's files, even - in subdirectories - """ - import data - modules = sorted(modutils.get_module_files(path.join(DATADIR, 'find_test'), - data.__path__[0])) - self.assertEqual(modules, - [path.join(DATADIR, 'find_test', x) for x in ['__init__.py', 'module.py', 'module2.py', 'noendingnewline.py', 'nonregr.py']]) - - def test_load_module_set_attribute(self): - import logilab.common.fileutils - import logilab - del logilab.common.fileutils - del sys.modules['logilab.common.fileutils'] - m = modutils.load_module_from_modpath(['logilab', 'common', 'fileutils']) - self.assertTrue( hasattr(logilab, 'common') ) - self.assertTrue( hasattr(logilab.common, 'fileutils') ) - self.assertTrue( m is logilab.common.fileutils ) - - -def load_tests(loader, tests, ignore): - from logilab.common import modutils - tests.addTests(doctest.DocTestSuite(modutils)) - return tests - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py b/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py deleted file mode 100644 index 48e36ce5..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_pytest.py +++ /dev/null @@ -1,86 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -from os.path import join -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.pytest import * - -class ModuleFunctionTC(TestCase): - def test_this_is_testdir(self): - self.assertTrue(this_is_a_testdir("test")) - self.assertTrue(this_is_a_testdir("tests")) - self.assertTrue(this_is_a_testdir("unittests")) - self.assertTrue(this_is_a_testdir("unittest")) - self.assertFalse(this_is_a_testdir("unit")) - self.assertFalse(this_is_a_testdir("units")) - self.assertFalse(this_is_a_testdir("undksjhqfl")) - self.assertFalse(this_is_a_testdir("this_is_not_a_dir_test")) - self.assertFalse(this_is_a_testdir("this_is_not_a_testdir")) - self.assertFalse(this_is_a_testdir("unittestsarenothere")) - self.assertTrue(this_is_a_testdir(join("coincoin", "unittests"))) - self.assertFalse(this_is_a_testdir(join("unittests", "spongebob"))) - - def test_this_is_testfile(self): - self.assertTrue(this_is_a_testfile("test.py")) - self.assertTrue(this_is_a_testfile("testbabar.py")) - self.assertTrue(this_is_a_testfile("unittest_celestine.py")) - self.assertTrue(this_is_a_testfile("smoketest.py")) - self.assertFalse(this_is_a_testfile("test.pyc")) - self.assertFalse(this_is_a_testfile("zephir_test.py")) - self.assertFalse(this_is_a_testfile("smoketest.pl")) - self.assertFalse(this_is_a_testfile("unittest")) - self.assertTrue(this_is_a_testfile(join("coincoin", "unittest_bibi.py"))) - self.assertFalse(this_is_a_testfile(join("unittest", "spongebob.py"))) - - def test_replace_trace(self): - def tracefn(frame, event, arg): - pass - - oldtrace = sys.gettrace() - with replace_trace(tracefn): - self.assertIs(sys.gettrace(), tracefn) - - self.assertIs(sys.gettrace(), oldtrace) - - def test_pause_trace(self): - def tracefn(frame, event, arg): - pass - - oldtrace = sys.gettrace() - sys.settrace(tracefn) - try: - self.assertIs(sys.gettrace(), tracefn) - with pause_trace(): - self.assertIs(sys.gettrace(), None) - self.assertIs(sys.gettrace(), tracefn) - finally: - sys.settrace(oldtrace) - - def test_nocoverage(self): - def tracefn(frame, event, arg): - pass - - @nocoverage - def myfn(): - self.assertIs(sys.gettrace(), None) - - with replace_trace(tracefn): - myfn() - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py b/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py deleted file mode 100644 index 1c07e4ce..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_registry.py +++ /dev/null @@ -1,220 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of Logilab-Common. 
-# -# Logilab-Common is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# Logilab-Common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with Logilab-Common. If not, see . -"""unit tests for selectors mechanism""" - -import gc -import logging -import os.path as osp -import sys -from operator import eq, lt, le, gt -from contextlib import contextmanager -import warnings - -logging.basicConfig(level=logging.ERROR) - -from logilab.common.testlib import TestCase, unittest_main - -from logilab.common.registry import * - - -class _1_(Predicate): - def __call__(self, *args, **kwargs): - return 1 - -class _0_(Predicate): - def __call__(self, *args, **kwargs): - return 0 - -def _2_(*args, **kwargs): - return 2 - - -class SelectorsTC(TestCase): - def test_basic_and(self): - selector = _1_() & _1_() - self.assertEqual(selector(None), 2) - selector = _1_() & _0_() - self.assertEqual(selector(None), 0) - selector = _0_() & _1_() - self.assertEqual(selector(None), 0) - - def test_basic_or(self): - selector = _1_() | _1_() - self.assertEqual(selector(None), 1) - selector = _1_() | _0_() - self.assertEqual(selector(None), 1) - selector = _0_() | _1_() - self.assertEqual(selector(None), 1) - selector = _0_() | _0_() - self.assertEqual(selector(None), 0) - - def test_selector_and_function(self): - selector = _1_() & _2_ - self.assertEqual(selector(None), 3) - selector = _2_ & _1_() - self.assertEqual(selector(None), 3) - - def test_three_and(self): - selector = _1_() & _1_() & _1_() - self.assertEqual(selector(None), 3) - selector = _1_() & _0_() & _1_() - self.assertEqual(selector(None), 0) - selector = _0_() & _1_() & _1_() - self.assertEqual(selector(None), 0) - - def test_three_or(self): - selector = _1_() | _1_() | _1_() - self.assertEqual(selector(None), 1) - selector = _1_() | _0_() | _1_() - self.assertEqual(selector(None), 1) - selector = _0_() | _1_() | _1_() - self.assertEqual(selector(None), 1) - selector = _0_() | _0_() | _0_() - self.assertEqual(selector(None), 0) - - def test_composition(self): - selector = (_1_() & _1_()) & (_1_() & _1_()) - self.assertTrue(isinstance(selector, AndPredicate)) - self.assertEqual(len(selector.selectors), 4) - self.assertEqual(selector(None), 4) - selector = (_1_() & _0_()) | (_1_() & _1_()) - self.assertTrue(isinstance(selector, OrPredicate)) - self.assertEqual(len(selector.selectors), 2) - self.assertEqual(selector(None), 2) - - def test_search_selectors(self): - sel = _1_() - self.assertIs(sel.search_selector(_1_), sel) - csel = AndPredicate(sel, Predicate()) - self.assertIs(csel.search_selector(_1_), sel) - csel = AndPredicate(Predicate(), sel) - self.assertIs(csel.search_selector(_1_), sel) - self.assertIs(csel.search_selector((AndPredicate, OrPredicate)), csel) - self.assertIs(csel.search_selector((OrPredicate, AndPredicate)), csel) - self.assertIs(csel.search_selector((_1_, _0_)), sel) - self.assertIs(csel.search_selector((_0_, _1_)), sel) - - def test_inplace_and(self): - selector = _1_() - selector &= _1_() - selector &= _1_() - self.assertEqual(selector(None), 3) - selector = _1_() - 
selector &= _0_() - selector &= _1_() - self.assertEqual(selector(None), 0) - selector = _0_() - selector &= _1_() - selector &= _1_() - self.assertEqual(selector(None), 0) - selector = _0_() - selector &= _0_() - selector &= _0_() - self.assertEqual(selector(None), 0) - - def test_inplace_or(self): - selector = _1_() - selector |= _1_() - selector |= _1_() - self.assertEqual(selector(None), 1) - selector = _1_() - selector |= _0_() - selector |= _1_() - self.assertEqual(selector(None), 1) - selector = _0_() - selector |= _1_() - selector |= _1_() - self.assertEqual(selector(None), 1) - selector = _0_() - selector |= _0_() - selector |= _0_() - self.assertEqual(selector(None), 0) - - def test_wrap_selectors(self): - class _temp_(Predicate): - def __call__(self, *args, **kwargs): - return 0 - del _temp_ # test weakref - s1 = _1_() & _1_() - s2 = _1_() & _0_() - s3 = _0_() & _1_() - gc.collect() - self.count = 0 - def decorate(f, self=self): - def wrapper(*args, **kwargs): - self.count += 1 - return f(*args, **kwargs) - return wrapper - wrap_predicates(decorate) - self.assertEqual(s1(None), 2) - self.assertEqual(s2(None), 0) - self.assertEqual(s3(None), 0) - self.assertEqual(self.count, 8) - -@contextmanager -def prepended_syspath(path): - sys.path.insert(0, path) - yield - sys.path = sys.path[1:] - -class RegistryStoreTC(TestCase): - - def test_autoload(self): - store = RegistryStore() - store.setdefault('zereg') - with prepended_syspath(self.datadir): - with warnings.catch_warnings(record=True) as warns: - store.register_objects([self.datapath('regobjects.py'), - self.datapath('regobjects2.py')]) - self.assertIn('use register_modnames() instead', - [str(w.message) for w in warns]) - self.assertEqual(['zereg'], list(store.keys())) - self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')), - set(store['zereg'])) - - def test_autoload_modnames(self): - store = RegistryStore() - store.setdefault('zereg') - with prepended_syspath(self.datadir): - store.register_modnames(['regobjects', 'regobjects2']) - self.assertEqual(['zereg'], list(store.keys())) - self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')), - set(store['zereg'])) - - -class RegistrableInstanceTC(TestCase): - - def test_instance_modulename(self): - with warnings.catch_warnings(record=True) as warns: - obj = RegistrableInstance() - self.assertEqual(obj.__module__, 'unittest_registry') - self.assertIn('instantiate RegistrableInstance with __module__=__name__', - [str(w.message) for w in warns]) - # no inheritance - obj = RegistrableInstance(__module__=__name__) - self.assertEqual(obj.__module__, 'unittest_registry') - # with inheritance from another python file - with prepended_syspath(self.datadir): - from regobjects2 import instance, MyRegistrableInstance - instance2 = MyRegistrableInstance(__module__=__name__) - self.assertEqual(instance.__module__, 'regobjects2') - self.assertEqual(instance2.__module__, 'unittest_registry') - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py deleted file mode 100644 index 9342ae9b..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_shellutils.py +++ /dev/null @@ -1,235 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""unit tests for logilab.common.shellutils""" - -import sys, os, tempfile, shutil -from os.path import join, dirname, abspath -import datetime, time - -from six.moves import range - -from logilab.common.testlib import TestCase, unittest_main - -from logilab.common.shellutils import (globfind, find, ProgressBar, - RawInput) -from logilab.common.compat import StringIO - - -DATA_DIR = join(dirname(abspath(__file__)), 'data', 'find_test') - - -class FindTC(TestCase): - def test_include(self): - files = set(find(DATA_DIR, '.py')) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py', - 'module2.py', 'noendingnewline.py', - 'nonregr.py', join('sub', 'momo.py')]])) - files = set(find(DATA_DIR, ('.py',), blacklist=('sub',))) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py', - 'module2.py', 'noendingnewline.py', - 'nonregr.py']])) - - def test_exclude(self): - files = set(find(DATA_DIR, ('.py', '.pyc'), exclude=True)) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['foo.txt', - 'newlines.txt', - 'normal_file.txt', - 'test.ini', - 'test1.msg', - 'test2.msg', - 'spam.txt', - join('sub', 'doc.txt'), - 'write_protected_file.txt', - ]])) - - def test_globfind(self): - files = set(globfind(DATA_DIR, '*.py')) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['__init__.py', 'module.py', - 'module2.py', 'noendingnewline.py', - 'nonregr.py', join('sub', 'momo.py')]])) - files = set(globfind(DATA_DIR, 'mo*.py')) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['module.py', 'module2.py', - join('sub', 'momo.py')]])) - files = set(globfind(DATA_DIR, 'mo*.py', blacklist=('sub',))) - self.assertSetEqual(files, - set([join(DATA_DIR, f) for f in ['module.py', 'module2.py']])) - - -class ProgressBarTC(TestCase): - def test_refresh(self): - pgb_stream = StringIO() - expected_stream = StringIO() - pgb = ProgressBar(20, stream=pgb_stream) - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) # nothing print before refresh - pgb.refresh() - expected_stream.write("\r["+' '*20+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - def test_refresh_g_size(self): - pgb_stream = StringIO() - expected_stream = StringIO() - pgb = ProgressBar(20, 35, stream=pgb_stream) - pgb.refresh() - expected_stream.write("\r["+' '*35+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - def test_refresh_l_size(self): - pgb_stream = StringIO() - expected_stream = StringIO() - pgb = ProgressBar(20, 3, stream=pgb_stream) - pgb.refresh() - expected_stream.write("\r["+' '*3+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - def _update_test(self, nbops, expected, size = None): - pgb_stream = StringIO() - expected_stream = StringIO() - if size is None: - pgb = ProgressBar(nbops, stream=pgb_stream) 
- size=20 - else: - pgb = ProgressBar(nbops, size, stream=pgb_stream) - last = 0 - for round in expected: - if not hasattr(round, '__int__'): - dots, update = round - else: - dots, update = round, None - pgb.update() - if update or (update is None and dots != last): - last = dots - expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - def test_default(self): - self._update_test(20, range(1, 21)) - - def test_nbops_gt_size(self): - """Test the progress bar for nbops > size""" - def half(total): - for counter in range(1, total+1): - yield counter // 2 - self._update_test(40, half(40)) - - def test_nbops_lt_size(self): - """Test the progress bar for nbops < size""" - def double(total): - for counter in range(1, total+1): - yield counter * 2 - self._update_test(10, double(10)) - - def test_nbops_nomul_size(self): - """Test the progress bar for size % nbops !=0 (non int number of dots per update)""" - self._update_test(3, (6, 13, 20)) - - def test_overflow(self): - self._update_test(5, (8, 16, 25, 33, 42, (42, True)), size=42) - - def test_update_exact(self): - pgb_stream = StringIO() - expected_stream = StringIO() - size=20 - pgb = ProgressBar(100, size, stream=pgb_stream) - last = 0 - for dots in range(10, 105, 15): - pgb.update(dots, exact=True) - dots //= 5 - expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - def test_update_relative(self): - pgb_stream = StringIO() - expected_stream = StringIO() - size=20 - pgb = ProgressBar(100, size, stream=pgb_stream) - last = 0 - for dots in range(5, 105, 5): - pgb.update(5, exact=False) - dots //= 5 - expected_stream.write("\r["+('='*dots)+(' '*(size-dots))+"]") - self.assertEqual(pgb_stream.getvalue(), expected_stream.getvalue()) - - -class RawInputTC(TestCase): - - def auto_input(self, *args): - self.input_args = args - return self.input_answer - - def setUp(self): - null_printer = lambda x: None - self.qa = RawInput(self.auto_input, null_printer) - - def test_ask_default(self): - self.input_answer = '' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'yes') - self.input_answer = ' ' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'yes') - - def test_ask_case(self): - self.input_answer = 'no' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'no') - self.input_answer = 'No' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'no') - self.input_answer = 'NO' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'no') - self.input_answer = 'nO' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'no') - self.input_answer = 'YES' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(answer, 'yes') - - def test_ask_prompt(self): - self.input_answer = '' - answer = self.qa.ask('text', ('yes', 'no'), 'yes') - self.assertEqual(self.input_args[0], 'text [Y(es)/n(o)]: ') - answer = self.qa.ask('text', ('y', 'n'), 'y') - self.assertEqual(self.input_args[0], 'text [Y/n]: ') - answer = self.qa.ask('text', ('n', 'y'), 'y') - self.assertEqual(self.input_args[0], 'text [n/Y]: ') - answer = self.qa.ask('text', ('yes', 'no', 'maybe', '1'), 'yes') - self.assertEqual(self.input_args[0], 'text [Y(es)/n(o)/m(aybe)/1]: ') - - def test_ask_ambiguous(self): - self.input_answer = 'y' - self.assertRaises(Exception, 
self.qa.ask, 'text', ('yes', 'yep'), 'yes') - - def test_confirm(self): - self.input_answer = 'y' - self.assertEqual(self.qa.confirm('Say yes'), True) - self.assertEqual(self.qa.confirm('Say yes', default_is_yes=False), True) - self.input_answer = 'n' - self.assertEqual(self.qa.confirm('Say yes'), False) - self.assertEqual(self.qa.confirm('Say yes', default_is_yes=False), False) - self.input_answer = '' - self.assertEqual(self.qa.confirm('Say default'), True) - self.assertEqual(self.qa.confirm('Say default', default_is_yes=False), False) - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_table.py b/pymode/libs/logilab-common-1.4.1/test/unittest_table.py deleted file mode 100644 index 320b6938..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_table.py +++ /dev/null @@ -1,448 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" -Unittests for table management -""" - - -import sys -import os - -from six.moves import range - -from logilab.common.compat import StringIO -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.table import Table, TableStyleSheet, DocbookTableWriter, \ - DocbookRenderer, TableStyle, TableWriter, TableCellRenderer - -class TableTC(TestCase): - """Table TestCase class""" - - def setUp(self): - """Creates a default table""" - # from logilab.common import table - # reload(table) - self.table = Table() - self.table.create_rows(['row1', 'row2', 'row3']) - self.table.create_columns(['col1', 'col2']) - - def test_valeur_scalaire(self): - tab = Table() - tab.create_columns(['col1']) - tab.append_row([1]) - self.assertEqual(tab, [[1]]) - tab.append_row([2]) - self.assertEqual(tab[0, 0], 1) - self.assertEqual(tab[1, 0], 2) - - def test_valeur_ligne(self): - tab = Table() - tab.create_columns(['col1', 'col2']) - tab.append_row([1, 2]) - self.assertEqual(tab, [[1, 2]]) - - def test_valeur_colonne(self): - tab = Table() - tab.create_columns(['col1']) - tab.append_row([1]) - tab.append_row([2]) - self.assertEqual(tab, [[1], [2]]) - self.assertEqual(tab[:, 0], [1, 2]) - - def test_indexation(self): - """we should be able to use [] to access rows""" - self.assertEqual(self.table[0], self.table.data[0]) - self.assertEqual(self.table[1], self.table.data[1]) - - def test_iterable(self): - """test iter(table)""" - it = iter(self.table) - self.assertEqual(next(it), self.table.data[0]) - self.assertEqual(next(it), self.table.data[1]) - - def test_get_rows(self): - """tests Table.get_rows()""" - self.assertEqual(self.table, [[0, 0], [0, 0], [0, 0]]) - self.assertEqual(self.table[:], [[0, 0], [0, 0], [0, 0]]) - self.table.insert_column(1, range(3), 'supp') - self.assertEqual(self.table, [[0, 0, 0], [0, 1, 0], [0, 2, 0]]) - 
self.assertEqual(self.table[:], [[0, 0, 0], [0, 1, 0], [0, 2, 0]]) - - def test_get_cells(self): - self.table.insert_column(1, range(3), 'supp') - self.assertEqual(self.table[0, 1], 0) - self.assertEqual(self.table[1, 1], 1) - self.assertEqual(self.table[2, 1], 2) - self.assertEqual(self.table['row1', 'supp'], 0) - self.assertEqual(self.table['row2', 'supp'], 1) - self.assertEqual(self.table['row3', 'supp'], 2) - self.assertRaises(KeyError, self.table.__getitem__, ('row1', 'foo')) - self.assertRaises(KeyError, self.table.__getitem__, ('foo', 'bar')) - - def test_shape(self): - """tests table shape""" - self.assertEqual(self.table.shape, (3, 2)) - self.table.insert_column(1, range(3), 'supp') - self.assertEqual(self.table.shape, (3, 3)) - - def test_set_column(self): - """Tests that table.set_column() works fine. - """ - self.table.set_column(0, range(3)) - self.assertEqual(self.table[0, 0], 0) - self.assertEqual(self.table[1, 0], 1) - self.assertEqual(self.table[2, 0], 2) - - def test_set_column_by_id(self): - """Tests that table.set_column_by_id() works fine. - """ - self.table.set_column_by_id('col1', range(3)) - self.assertEqual(self.table[0, 0], 0) - self.assertEqual(self.table[1, 0], 1) - self.assertEqual(self.table[2, 0], 2) - self.assertRaises(KeyError, self.table.set_column_by_id, 'col123', range(3)) - - def test_cells_ids(self): - """tests that we can access cells by giving row/col ids""" - self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row12', 'col1', 12) - self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row1', 'col12', 12) - self.assertEqual(self.table[0, 0], 0) - self.table.set_cell_by_ids('row1', 'col1', 'DATA') - self.assertEqual(self.table[0, 0], 'DATA') - self.assertRaises(KeyError, self.table.set_row_by_id, 'row12', []) - self.table.set_row_by_id('row1', ['1.0', '1.1']) - self.assertEqual(self.table[0, 0], '1.0') - - def test_insert_row(self): - """tests a row insertion""" - tmp_data = ['tmp1', 'tmp2'] - self.table.insert_row(1, tmp_data, 'tmprow') - self.assertEqual(self.table[1], tmp_data) - self.assertEqual(self.table['tmprow'], tmp_data) - self.table.delete_row_by_id('tmprow') - self.assertRaises(KeyError, self.table.delete_row_by_id, 'tmprow') - self.assertEqual(self.table[1], [0, 0]) - self.assertRaises(KeyError, self.table.__getitem__, 'tmprow') - - def test_get_column(self): - """Tests that table.get_column() works fine. - """ - self.table.set_cell(0, 1, 12) - self.table.set_cell(2, 1, 13) - self.assertEqual(self.table[:, 1], [12, 0, 13]) - self.assertEqual(self.table[:, 'col2'], [12, 0, 13]) - - def test_get_columns(self): - """Tests if table.get_columns() works fine. - """ - self.table.set_cell(0, 1, 12) - self.table.set_cell(2, 1, 13) - self.assertEqual(self.table.get_columns(), [[0, 0, 0], [12, 0, 13]]) - - def test_insert_column(self): - """Tests that table.insert_column() works fine. - """ - self.table.insert_column(1, range(3), "inserted_column") - self.assertEqual(self.table[:, 1], [0, 1, 2]) - self.assertEqual(self.table.col_names, - ['col1', 'inserted_column', 'col2']) - - def test_delete_column(self): - """Tests that table.delete_column() works fine. - """ - self.table.delete_column(1) - self.assertEqual(self.table.col_names, ['col1']) - self.assertEqual(self.table[:, 0], [0, 0, 0]) - self.assertRaises(KeyError, self.table.delete_column_by_id, 'col2') - self.table.delete_column_by_id('col1') - self.assertEqual(self.table.col_names, []) - - def test_transpose(self): - """Tests that table.transpose() works fine. 
- """ - self.table.append_column(range(5, 8), 'col3') - ttable = self.table.transpose() - self.assertEqual(ttable.row_names, ['col1', 'col2', 'col3']) - self.assertEqual(ttable.col_names, ['row1', 'row2', 'row3']) - self.assertEqual(ttable.data, [[0, 0, 0], [0, 0, 0], [5, 6, 7]]) - - def test_sort_table(self): - """Tests the table sort by column - """ - self.table.set_column(0, [3, 1, 2]) - self.table.set_column(1, [1, 2, 3]) - self.table.sort_by_column_index(0) - self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1']) - self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]]) - self.table.sort_by_column_index(1, 'desc') - self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1']) - self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]]) - - def test_sort_by_id(self): - """tests sort_by_column_id()""" - self.table.set_column_by_id('col1', [3, 1, 2]) - self.table.set_column_by_id('col2', [1, 2, 3]) - self.table.sort_by_column_id('col1') - self.assertRaises(KeyError, self.table.sort_by_column_id, 'col123') - self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1']) - self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]]) - self.table.sort_by_column_id('col2', 'desc') - self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1']) - self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]]) - - def test_pprint(self): - """only tests pprint doesn't raise an exception""" - self.table.pprint() - str(self.table) - - -class GroupByTC(TestCase): - """specific test suite for groupby()""" - def setUp(self): - t = Table() - t.create_columns(['date', 'res', 'task', 'usage']) - t.append_row(['date1', 'ing1', 'task1', 0.3]) - t.append_row(['date1', 'ing2', 'task2', 0.3]) - t.append_row(['date2', 'ing3', 'task3', 0.3]) - t.append_row(['date3', 'ing4', 'task2', 0.3]) - t.append_row(['date1', 'ing1', 'task3', 0.3]) - t.append_row(['date3', 'ing1', 'task3', 0.3]) - self.table = t - - def test_single_groupby(self): - """tests groupby() on several columns""" - grouped = self.table.groupby('date') - self.assertEqual(len(grouped), 3) - self.assertEqual(len(grouped['date1']), 3) - self.assertEqual(len(grouped['date2']), 1) - self.assertEqual(len(grouped['date3']), 2) - self.assertEqual(grouped['date1'], [ - ('date1', 'ing1', 'task1', 0.3), - ('date1', 'ing2', 'task2', 0.3), - ('date1', 'ing1', 'task3', 0.3), - ]) - self.assertEqual(grouped['date2'], [('date2', 'ing3', 'task3', 0.3)]) - self.assertEqual(grouped['date3'], [ - ('date3', 'ing4', 'task2', 0.3), - ('date3', 'ing1', 'task3', 0.3), - ]) - - def test_multiple_groupby(self): - """tests groupby() on several columns""" - grouped = self.table.groupby('date', 'task') - self.assertEqual(len(grouped), 3) - self.assertEqual(len(grouped['date1']), 3) - self.assertEqual(len(grouped['date2']), 1) - self.assertEqual(len(grouped['date3']), 2) - self.assertEqual(grouped['date1']['task1'], [('date1', 'ing1', 'task1', 0.3)]) - self.assertEqual(grouped['date2']['task3'], [('date2', 'ing3', 'task3', 0.3)]) - self.assertEqual(grouped['date3']['task2'], [('date3', 'ing4', 'task2', 0.3)]) - date3 = grouped['date3'] - self.assertRaises(KeyError, date3.__getitem__, 'task1') - - - def test_select(self): - """tests Table.select() method""" - rows = self.table.select('date', 'date1') - self.assertEqual(rows, [ - ('date1', 'ing1', 'task1', 0.3), - ('date1', 'ing2', 'task2', 0.3), - ('date1', 'ing1', 'task3', 0.3), - ]) - -class TableStyleSheetTC(TestCase): - """The Stylesheet test case - """ - def setUp(self): - """Builds a simple table to test 
the stylesheet - """ - self.table = Table() - self.table.create_row('row1') - self.table.create_columns(['a', 'b', 'c']) - self.stylesheet = TableStyleSheet() - # We don't want anything to be printed - self.stdout_backup = sys.stdout - sys.stdout = StringIO() - - def tearDown(self): - sys.stdout = self.stdout_backup - - def test_add_rule(self): - """Tests that the regex pattern works as expected. - """ - rule = '0_2 = sqrt(0_0**2 + 0_1**2)' - self.stylesheet.add_rule(rule) - self.table.set_row(0, [3, 4, 0]) - self.table.apply_stylesheet(self.stylesheet) - self.assertEqual(self.table[0], [3, 4, 5]) - self.assertEqual(len(self.stylesheet.rules), 1) - self.stylesheet.add_rule('some bad rule with bad syntax') - self.assertEqual(len(self.stylesheet.rules), 1, "Ill-formed rule mustn't be added") - self.assertEqual(len(self.stylesheet.instructions), 1, "Ill-formed rule mustn't be added") - - def test_stylesheet_init(self): - """tests Stylesheet.__init__""" - rule = '0_2 = 1' - sheet = TableStyleSheet([rule, 'bad rule']) - self.assertEqual(len(sheet.rules), 1, "Ill-formed rule mustn't be added") - self.assertEqual(len(sheet.instructions), 1, "Ill-formed rule mustn't be added") - - def test_rowavg_rule(self): - """Tests that add_rowavg_rule works as expected - """ - self.table.set_row(0, [10, 20, 0]) - self.stylesheet.add_rowavg_rule((0, 2), 0, 0, 1) - self.table.apply_stylesheet(self.stylesheet) - val = self.table[0, 2] - self.assertEqual(int(val), 15) - - - def test_rowsum_rule(self): - """Tests that add_rowsum_rule works as expected - """ - self.table.set_row(0, [10, 20, 0]) - self.stylesheet.add_rowsum_rule((0, 2), 0, 0, 1) - self.table.apply_stylesheet(self.stylesheet) - val = self.table[0, 2] - self.assertEqual(val, 30) - - - def test_colavg_rule(self): - """Tests that add_colavg_rule works as expected - """ - self.table.set_row(0, [10, 20, 0]) - self.table.append_row([12, 8, 3], 'row2') - self.table.create_row('row3') - self.stylesheet.add_colavg_rule((2, 0), 0, 0, 1) - self.table.apply_stylesheet(self.stylesheet) - val = self.table[2, 0] - self.assertEqual(int(val), 11) - - - def test_colsum_rule(self): - """Tests that add_colsum_rule works as expected - """ - self.table.set_row(0, [10, 20, 0]) - self.table.append_row([12, 8, 3], 'row2') - self.table.create_row('row3') - self.stylesheet.add_colsum_rule((2, 0), 0, 0, 1) - self.table.apply_stylesheet(self.stylesheet) - val = self.table[2, 0] - self.assertEqual(val, 22) - - - -class TableStyleTC(TestCase): - """Test suite for TableSuite""" - def setUp(self): - self.table = Table() - self.table.create_rows(['row1', 'row2', 'row3']) - self.table.create_columns(['col1', 'col2']) - self.style = TableStyle(self.table) - self._tested_attrs = (('size', '1*'), - ('alignment', 'right'), - ('unit', '')) - - def test_getset(self): - """tests style's get and set methods""" - for attrname, default_value in self._tested_attrs: - getter = getattr(self.style, 'get_%s' % attrname) - setter = getattr(self.style, 'set_%s' % attrname) - self.assertRaises(KeyError, getter, 'badcol') - self.assertEqual(getter('col1'), default_value) - setter('FOO', 'col1') - self.assertEqual(getter('col1'), 'FOO') - - def test_getset_index(self): - """tests style's get and set by index methods""" - for attrname, default_value in self._tested_attrs: - getter = getattr(self.style, 'get_%s' % attrname) - setter = getattr(self.style, 'set_%s' % attrname) - igetter = getattr(self.style, 'get_%s_by_index' % attrname) - isetter = getattr(self.style, 'set_%s_by_index' % attrname) - 
self.assertEqual(getter('__row_column__'), default_value) - isetter('FOO', 0) - self.assertEqual(getter('__row_column__'), 'FOO') - self.assertEqual(igetter(0), 'FOO') - self.assertEqual(getter('col1'), default_value) - isetter('FOO', 1) - self.assertEqual(getter('col1'), 'FOO') - self.assertEqual(igetter(1), 'FOO') - - -class RendererTC(TestCase): - """Test suite for DocbookRenderer""" - def setUp(self): - self.renderer = DocbookRenderer(alignment = True) - self.table = Table() - self.table.create_rows(['row1', 'row2', 'row3']) - self.table.create_columns(['col1', 'col2']) - self.style = TableStyle(self.table) - self.base_renderer = TableCellRenderer() - - def test_cell_content(self): - """test how alignment is rendered""" - entry_xml = self.renderer._render_cell_content('data', self.style, 1) - self.assertEqual(entry_xml, "data\n") - self.style.set_alignment_by_index('left', 1) - entry_xml = self.renderer._render_cell_content('data', self.style, 1) - self.assertEqual(entry_xml, "data\n") - - def test_default_content_rendering(self): - """tests that default rendering just prints the cell's content""" - rendered_cell = self.base_renderer._render_cell_content('data', self.style, 1) - self.assertEqual(rendered_cell, "data") - - def test_replacement_char(self): - """tests that 0 is replaced when asked for""" - cell_content = self.base_renderer._make_cell_content(0, self.style, 1) - self.assertEqual(cell_content, 0) - self.base_renderer.properties['skip_zero'] = '---' - cell_content = self.base_renderer._make_cell_content(0, self.style, 1) - self.assertEqual(cell_content, '---') - - def test_unit(self): - """tests if units are added""" - self.base_renderer.properties['units'] = True - self.style.set_unit_by_index('EUR', 1) - cell_content = self.base_renderer._make_cell_content(12, self.style, 1) - self.assertEqual(cell_content, '12 EUR') - - -class DocbookTableWriterTC(TestCase): - """TestCase for table's writer""" - def setUp(self): - self.stream = StringIO() - self.table = Table() - self.table.create_rows(['row1', 'row2', 'row3']) - self.table.create_columns(['col1', 'col2']) - self.writer = DocbookTableWriter(self.stream, self.table, None) - self.writer.set_renderer(DocbookRenderer()) - - def test_write_table(self): - """make sure write_table() doesn't raise any exception""" - self.writer.write_table() - - def test_abstract_writer(self): - """tests that Abstract Writers can't be used !""" - writer = TableWriter(self.stream, self.table, None) - self.assertRaises(NotImplementedError, writer.write_table) - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py b/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py deleted file mode 100644 index d8b6a9e7..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_taskqueue.py +++ /dev/null @@ -1,71 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -from logilab.common.testlib import TestCase, unittest_main - -from logilab.common.tasksqueue import * - -class TaskTC(TestCase): - - def test_eq(self): - self.assertFalse(Task('t1') == Task('t2')) - self.assertTrue(Task('t1') == Task('t1')) - - def test_cmp(self): - self.assertTrue(Task('t1', LOW) < Task('t2', MEDIUM)) - self.assertFalse(Task('t1', LOW) > Task('t2', MEDIUM)) - self.assertTrue(Task('t1', HIGH) > Task('t2', MEDIUM)) - self.assertFalse(Task('t1', HIGH) < Task('t2', MEDIUM)) - - -class PrioritizedTasksQueueTC(TestCase): - - def test_priority(self): - queue = PrioritizedTasksQueue() - queue.put(Task('t1')) - queue.put(Task('t2', MEDIUM)) - queue.put(Task('t3', HIGH)) - queue.put(Task('t4', LOW)) - self.assertEqual(queue.get().id, 't3') - self.assertEqual(queue.get().id, 't2') - self.assertEqual(queue.get().id, 't1') - self.assertEqual(queue.get().id, 't4') - - def test_remove_equivalent(self): - queue = PrioritizedTasksQueue() - queue.put(Task('t1')) - queue.put(Task('t2', MEDIUM)) - queue.put(Task('t1', HIGH)) - queue.put(Task('t3', MEDIUM)) - queue.put(Task('t2', MEDIUM)) - self.assertEqual(queue.qsize(), 3) - self.assertEqual(queue.get().id, 't1') - self.assertEqual(queue.get().id, 't2') - self.assertEqual(queue.get().id, 't3') - self.assertEqual(queue.qsize(), 0) - - def test_remove(self): - queue = PrioritizedTasksQueue() - queue.put(Task('t1')) - queue.put(Task('t2')) - queue.put(Task('t3')) - queue.remove('t2') - self.assertEqual([t.id for t in queue], ['t3', 't1']) - self.assertRaises(ValueError, queue.remove, 't4') - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py b/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py deleted file mode 100644 index fe2e31a8..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_testlib.py +++ /dev/null @@ -1,790 +0,0 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""unittest module for logilab.comon.testlib""" - -from __future__ import print_function - -import os -import sys -from os.path import join, dirname, isdir, isfile, abspath, exists -import tempfile -import shutil - -try: - __file__ -except NameError: - __file__ = sys.argv[0] - -from six.moves import range - -from logilab.common.compat import StringIO -from logilab.common.testlib import (unittest, TestSuite, unittest_main, Tags, - TestCase, mock_object, create_files, InnerTest, with_tempdir, tag, - require_version, require_module) -from logilab.common.pytest import SkipAwareTextTestRunner, NonStrictTestLoader - - -class MockTestCase(TestCase): - def __init__(self): - # Do not call unittest.TestCase's __init__ - pass - - def fail(self, msg): - raise AssertionError(msg) - -class UtilTC(TestCase): - - def test_mockobject(self): - obj = mock_object(foo='bar', baz='bam') - self.assertEqual(obj.foo, 'bar') - self.assertEqual(obj.baz, 'bam') - - def test_create_files(self): - chroot = tempfile.mkdtemp() - path_to = lambda path: join(chroot, path) - dircontent = lambda path: sorted(os.listdir(join(chroot, path))) - try: - self.assertFalse(isdir(path_to('a/'))) - create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], chroot) - # make sure directories exist - self.assertTrue(isdir(path_to('a'))) - self.assertTrue(isdir(path_to('a/b'))) - self.assertTrue(isdir(path_to('a/b/c'))) - self.assertTrue(isdir(path_to('a/b/c/d'))) - # make sure files exist - self.assertTrue(isfile(path_to('a/b/foo.py'))) - self.assertTrue(isfile(path_to('a/b/c/d/e.py'))) - # make sure only asked files were created - self.assertEqual(dircontent('a'), ['b']) - self.assertEqual(dircontent('a/b'), ['c', 'foo.py']) - self.assertEqual(dircontent('a/b/c'), ['d']) - self.assertEqual(dircontent('a/b/c/d'), ['e.py']) - finally: - shutil.rmtree(chroot) - - -class TestlibTC(TestCase): - - def mkdir(self, path): - if not exists(path): - self._dirs.add(path) - os.mkdir(path) - - def setUp(self): - self.tc = MockTestCase() - self._dirs = set() - - def tearDown(self): - while(self._dirs): - shutil.rmtree(self._dirs.pop(), ignore_errors=True) - - def test_dict_equals(self): - """tests TestCase.assertDictEqual""" - d1 = {'a' : 1, 'b' : 2} - d2 = {'a' : 1, 'b' : 3} - d3 = dict(d1) - self.assertRaises(AssertionError, self.tc.assertDictEqual, d1, d2) - self.tc.assertDictEqual(d1, d3) - self.tc.assertDictEqual(d3, d1) - self.tc.assertDictEqual(d1, d1) - - def test_list_equals(self): - """tests TestCase.assertListEqual""" - l1 = list(range(10)) - l2 = list(range(5)) - l3 = list(range(10)) - self.assertRaises(AssertionError, self.tc.assertListEqual, l1, l2) - self.tc.assertListEqual(l1, l1) - self.tc.assertListEqual(l1, l3) - self.tc.assertListEqual(l3, l1) - - def test_equality_for_sets(self): - s1 = set('ab') - s2 = set('a') - self.assertRaises(AssertionError, self.tc.assertSetEqual, s1, s2) - self.tc.assertSetEqual(s1, s1) - self.tc.assertSetEqual(set(), set()) - - def test_text_equality(self): - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", 12) - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", 12) - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", None) - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, "toto", None) - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 3.12, u"toto") - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 3.12, u"toto") - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, None, u"toto") - 
self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, None, u"toto") - self.tc.assertMultiLineEqual('toto\ntiti', 'toto\ntiti') - self.tc.assertMultiLineEqual('toto\ntiti', 'toto\ntiti') - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 'toto\ntiti', 'toto\n titi\n') - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, 'toto\ntiti', 'toto\n titi\n') - foo = join(dirname(__file__), 'data', 'foo.txt') - spam = join(dirname(__file__), 'data', 'spam.txt') - with open(foo) as fobj: - text1 = fobj.read() - self.tc.assertMultiLineEqual(text1, text1) - self.tc.assertMultiLineEqual(text1, text1) - with open(spam) as fobj: - text2 = fobj.read() - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, text1, text2) - self.assertRaises(AssertionError, self.tc.assertMultiLineEqual, text1, text2) - - def test_default_datadir(self): - expected_datadir = join(dirname(abspath(__file__)), 'data') - self.assertEqual(self.datadir, expected_datadir) - self.assertEqual(self.datapath('foo'), join(expected_datadir, 'foo')) - - def test_multiple_args_datadir(self): - expected_datadir = join(dirname(abspath(__file__)), 'data') - self.assertEqual(self.datadir, expected_datadir) - self.assertEqual(self.datapath('foo', 'bar'), join(expected_datadir, 'foo', 'bar')) - - def test_custom_datadir(self): - class MyTC(TestCase): - datadir = 'foo' - def test_1(self): pass - - # class' custom datadir - tc = MyTC('test_1') - self.assertEqual(tc.datapath('bar'), join('foo', 'bar')) - - def test_cached_datadir(self): - """test datadir is cached on the class""" - class MyTC(TestCase): - def test_1(self): pass - - expected_datadir = join(dirname(abspath(__file__)), 'data') - tc = MyTC('test_1') - self.assertEqual(tc.datadir, expected_datadir) - # changing module should not change the datadir - MyTC.__module__ = 'os' - self.assertEqual(tc.datadir, expected_datadir) - # even on new instances - tc2 = MyTC('test_1') - self.assertEqual(tc2.datadir, expected_datadir) - - def test_is(self): - obj_1 = [] - obj_2 = [] - self.assertIs(obj_1, obj_1) - self.assertRaises(AssertionError, self.assertIs, obj_1, obj_2) - - def test_isnot(self): - obj_1 = [] - obj_2 = [] - self.assertIsNot(obj_1, obj_2) - self.assertRaises(AssertionError, self.assertIsNot, obj_1, obj_1) - - def test_none(self): - self.assertIsNone(None) - self.assertRaises(AssertionError, self.assertIsNone, object()) - - def test_not_none(self): - self.assertIsNotNone(object()) - self.assertRaises(AssertionError, self.assertIsNotNone, None) - - def test_in(self): - self.assertIn("a", "dsqgaqg") - obj, seq = 'a', ('toto', "azf", "coin") - self.assertRaises(AssertionError, self.assertIn, obj, seq) - - def test_not_in(self): - self.assertNotIn('a', ('toto', "azf", "coin")) - self.assertRaises(AssertionError, self.assertNotIn, 'a', "dsqgaqg") - - -class GenerativeTestsTC(TestCase): - - def setUp(self): - output = StringIO() - self.runner = SkipAwareTextTestRunner(stream=output) - - def test_generative_ok(self): - class FooTC(TestCase): - def test_generative(self): - for i in range(10): - yield self.assertEqual, i, i - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 0) - - def test_generative_half_bad(self): - class FooTC(TestCase): - def test_generative(self): - for i in range(10): - yield self.assertEqual, i%2, 0 - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - 
self.assertEqual(len(result.failures), 5) - self.assertEqual(len(result.errors), 0) - - def test_generative_error(self): - class FooTC(TestCase): - def test_generative(self): - for i in range(10): - if i == 5: - raise ValueError('STOP !') - yield self.assertEqual, i, i - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 5) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 1) - - def test_generative_error2(self): - class FooTC(TestCase): - def test_generative(self): - for i in range(10): - if i == 5: - yield self.ouch - yield self.assertEqual, i, i - def ouch(self): raise ValueError('stop !') - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 11) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 1) - - def test_generative_setup(self): - class FooTC(TestCase): - def setUp(self): - raise ValueError('STOP !') - def test_generative(self): - for i in range(10): - yield self.assertEqual, i, i - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 1) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 1) - - def test_generative_inner_skip(self): - class FooTC(TestCase): - def check(self, val): - if val == 5: - self.innerSkip("no 5") - else: - self.assertEqual(val, val) - - def test_generative(self): - for i in range(10): - yield InnerTest("check_%s"%i, self.check, i) - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.skipped), 1) - - def test_generative_skip(self): - class FooTC(TestCase): - def check(self, val): - if val == 5: - self.skipTest("no 5") - else: - self.assertEqual(val, val) - - def test_generative(self): - for i in range(10): - yield InnerTest("check_%s"%i, self.check, i) - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.skipped), 1) - - def test_generative_inner_error(self): - class FooTC(TestCase): - def check(self, val): - if val == 5: - raise ValueError("no 5") - else: - self.assertEqual(val, val) - - def test_generative(self): - for i in range(10): - yield InnerTest("check_%s"%i, self.check, i) - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 1) - self.assertEqual(len(result.skipped), 0) - - def test_generative_inner_failure(self): - class FooTC(TestCase): - def check(self, val): - if val == 5: - self.assertEqual(val, val+1) - else: - self.assertEqual(val, val) - - def test_generative(self): - for i in range(10): - yield InnerTest("check_%s"%i, self.check, i) - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 10) - self.assertEqual(len(result.failures), 1) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.skipped), 0) - - - def test_generative_outer_failure(self): - class FooTC(TestCase): - def test_generative(self): - self.fail() - yield - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 0) - self.assertEqual(len(result.failures), 1) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.skipped), 0) - - def 
test_generative_outer_skip(self): - class FooTC(TestCase): - def test_generative(self): - self.skipTest('blah') - yield - - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 0) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 0) - self.assertEqual(len(result.skipped), 1) - - -class ExitFirstTC(TestCase): - def setUp(self): - output = StringIO() - self.runner = SkipAwareTextTestRunner(stream=output, exitfirst=True) - - def test_failure_exit_first(self): - class FooTC(TestCase): - def test_1(self): pass - def test_2(self): assert False - def test_3(self): pass - tests = [FooTC('test_1'), FooTC('test_2')] - result = self.runner.run(TestSuite(tests)) - self.assertEqual(result.testsRun, 2) - self.assertEqual(len(result.failures), 1) - self.assertEqual(len(result.errors), 0) - - def test_error_exit_first(self): - class FooTC(TestCase): - def test_1(self): pass - def test_2(self): raise ValueError() - def test_3(self): pass - tests = [FooTC('test_1'), FooTC('test_2'), FooTC('test_3')] - result = self.runner.run(TestSuite(tests)) - self.assertEqual(result.testsRun, 2) - self.assertEqual(len(result.failures), 0) - self.assertEqual(len(result.errors), 1) - - def test_generative_exit_first(self): - class FooTC(TestCase): - def test_generative(self): - for i in range(10): - yield self.assertTrue, False - result = self.runner.run(FooTC('test_generative')) - self.assertEqual(result.testsRun, 1) - self.assertEqual(len(result.failures), 1) - self.assertEqual(len(result.errors), 0) - - -class TestLoaderTC(TestCase): - ## internal classes for test purposes ######## - class FooTC(TestCase): - def test_foo1(self): pass - def test_foo2(self): pass - def test_bar1(self): pass - - class BarTC(TestCase): - def test_bar2(self): pass - ############################################## - - def setUp(self): - self.loader = NonStrictTestLoader() - self.module = TestLoaderTC # mock_object(FooTC=TestLoaderTC.FooTC, BarTC=TestLoaderTC.BarTC) - self.output = StringIO() - self.runner = SkipAwareTextTestRunner(stream=self.output) - - def assertRunCount(self, pattern, module, expected_count, skipped=()): - self.loader.test_pattern = pattern - self.loader.skipped_patterns = skipped - if pattern: - suite = self.loader.loadTestsFromNames([pattern], module) - else: - suite = self.loader.loadTestsFromModule(module) - result = self.runner.run(suite) - self.loader.test_pattern = None - self.loader.skipped_patterns = () - self.assertEqual(result.testsRun, expected_count) - - def test_collect_everything(self): - """make sure we don't change the default behaviour - for loadTestsFromModule() and loadTestsFromTestCase - """ - testsuite = self.loader.loadTestsFromModule(self.module) - self.assertEqual(len(testsuite._tests), 2) - suite1, suite2 = testsuite._tests - self.assertEqual(len(suite1._tests) + len(suite2._tests), 4) - - def test_collect_with_classname(self): - self.assertRunCount('FooTC', self.module, 3) - self.assertRunCount('BarTC', self.module, 1) - - def test_collect_with_classname_and_pattern(self): - data = [('FooTC.test_foo1', 1), ('FooTC.test_foo', 2), ('FooTC.test_fo', 2), - ('FooTC.foo1', 1), ('FooTC.foo', 2), ('FooTC.whatever', 0) - ] - for pattern, expected_count in data: - yield self.assertRunCount, pattern, self.module, expected_count - - def test_collect_with_pattern(self): - data = [('test_foo1', 1), ('test_foo', 2), ('test_bar', 2), - ('foo1', 1), ('foo', 2), ('bar', 2), ('ba', 2), - ('test', 4), ('ab', 0), - ] - for pattern, expected_count in 
data: - yield self.assertRunCount, pattern, self.module, expected_count - - def test_testcase_with_custom_metaclass(self): - class mymetaclass(type): pass - class MyMod: - class MyTestCase(TestCase): - __metaclass__ = mymetaclass - def test_foo1(self): pass - def test_foo2(self): pass - def test_bar(self): pass - data = [('test_foo1', 1), ('test_foo', 2), ('test_bar', 1), - ('foo1', 1), ('foo', 2), ('bar', 1), ('ba', 1), - ('test', 3), ('ab', 0), - ('MyTestCase.test_foo1', 1), ('MyTestCase.test_foo', 2), - ('MyTestCase.test_fo', 2), ('MyTestCase.foo1', 1), - ('MyTestCase.foo', 2), ('MyTestCase.whatever', 0) - ] - for pattern, expected_count in data: - yield self.assertRunCount, pattern, MyMod, expected_count - - def test_collect_everything_and_skipped_patterns(self): - testdata = [ (['foo1'], 3), (['foo'], 2), - (['foo', 'bar'], 0), ] - for skipped, expected_count in testdata: - yield self.assertRunCount, None, self.module, expected_count, skipped - - def test_collect_specific_pattern_and_skip_some(self): - testdata = [ ('bar', ['foo1'], 2), ('bar', [], 2), - ('bar', ['bar'], 0), ] - for runpattern, skipped, expected_count in testdata: - yield self.assertRunCount, runpattern, self.module, expected_count, skipped - - def test_skip_classname(self): - testdata = [ (['BarTC'], 3), (['FooTC'], 1), ] - for skipped, expected_count in testdata: - yield self.assertRunCount, None, self.module, expected_count, skipped - - def test_skip_classname_and_specific_collect(self): - testdata = [ ('bar', ['BarTC'], 1), ('foo', ['FooTC'], 0), ] - for runpattern, skipped, expected_count in testdata: - yield self.assertRunCount, runpattern, self.module, expected_count, skipped - - def test_nonregr_dotted_path(self): - self.assertRunCount('FooTC.test_foo', self.module, 2) - - def test_inner_tests_selection(self): - class MyMod: - class MyTestCase(TestCase): - def test_foo(self): pass - def test_foobar(self): - for i in range(5): - if i%2 == 0: - yield InnerTest('even', lambda: None) - else: - yield InnerTest('odd', lambda: None) - yield lambda: None - - # FIXME InnerTest masked by pattern usage - # data = [('foo', 7), ('test_foobar', 6), ('even', 3), ('odd', 2), ] - data = [('foo', 7), ('test_foobar', 6), ('even', 0), ('odd', 0), ] - for pattern, expected_count in data: - yield self.assertRunCount, pattern, MyMod, expected_count - - def test_nonregr_class_skipped_option(self): - class MyMod: - class MyTestCase(TestCase): - def test_foo(self): pass - def test_bar(self): pass - class FooTC(TestCase): - def test_foo(self): pass - self.assertRunCount('foo', MyMod, 2) - self.assertRunCount(None, MyMod, 3) - self.assertRunCount('foo', MyMod, 1, ['FooTC']) - self.assertRunCount(None, MyMod, 2, ['FooTC']) - - def test__classes_are_ignored(self): - class MyMod: - class _Base(TestCase): - def test_1(self): pass - class MyTestCase(_Base): - def test_2(self): pass - self.assertRunCount(None, MyMod, 2) - - -class DecoratorTC(TestCase): - - @with_tempdir - def test_tmp_dir_normal_1(self): - tempdir = tempfile.gettempdir() - # assert temp directory is empty - self.assertListEqual(list(os.walk(tempdir)), - [(tempdir, [], [])]) - - witness = [] - - @with_tempdir - def createfile(list): - fd1, fn1 = tempfile.mkstemp() - fd2, fn2 = tempfile.mkstemp() - dir = tempfile.mkdtemp() - fd3, fn3 = tempfile.mkstemp(dir=dir) - tempfile.mkdtemp() - list.append(True) - for fd in (fd1, fd2, fd3): - os.close(fd) - - self.assertFalse(witness) - createfile(witness) - self.assertTrue(witness) - - self.assertEqual(tempfile.gettempdir(), tempdir) - - 
# assert temp directory is empty - self.assertListEqual(list(os.walk(tempdir)), - [(tempdir, [], [])]) - - @with_tempdir - def test_tmp_dir_normal_2(self): - tempdir = tempfile.gettempdir() - # assert temp directory is empty - self.assertListEqual(list(os.walk(tempfile.tempdir)), - [(tempfile.tempdir, [], [])]) - - - class WitnessException(Exception): - pass - - @with_tempdir - def createfile(): - fd1, fn1 = tempfile.mkstemp() - fd2, fn2 = tempfile.mkstemp() - dir = tempfile.mkdtemp() - fd3, fn3 = tempfile.mkstemp(dir=dir) - tempfile.mkdtemp() - for fd in (fd1, fd2, fd3): - os.close(fd) - raise WitnessException() - - self.assertRaises(WitnessException, createfile) - - # assert tempdir didn't change - self.assertEqual(tempfile.gettempdir(), tempdir) - - # assert temp directory is empty - self.assertListEqual(list(os.walk(tempdir)), - [(tempdir, [], [])]) - - def test_tmpdir_generator(self): - orig_tempdir = tempfile.gettempdir() - - @with_tempdir - def gen(): - yield tempfile.gettempdir() - - for tempdir in gen(): - self.assertNotEqual(orig_tempdir, tempdir) - self.assertEqual(orig_tempdir, tempfile.gettempdir()) - - def setUp(self): - self.pyversion = sys.version_info - - def tearDown(self): - sys.version_info = self.pyversion - - def test_require_version_good(self): - """ should return the same function - """ - def func() : - pass - sys.version_info = (2, 5, 5, 'final', 4) - current = sys.version_info[:3] - compare = ('2.4', '2.5', '2.5.4', '2.5.5') - for version in compare: - decorator = require_version(version) - self.assertEqual(func, decorator(func), '%s =< %s : function \ - return by the decorator should be the same.' % (version, - '.'.join([str(element) for element in current]))) - - def test_require_version_bad(self): - """ should return a different function : skipping test - """ - def func() : - pass - sys.version_info = (2, 5, 5, 'final', 4) - current = sys.version_info[:3] - compare = ('2.5.6', '2.6', '2.6.5') - for version in compare: - decorator = require_version(version) - self.assertNotEqual(func, decorator(func), '%s >= %s : function \ - return by the decorator should NOT be the same.' - % ('.'.join([str(element) for element in current]), version)) - - def test_require_version_exception(self): - """ should throw a ValueError exception - """ - def func() : - pass - compare = ('2.5.a', '2.a', 'azerty') - for version in compare: - decorator = require_version(version) - self.assertRaises(ValueError, decorator, func) - - def test_require_module_good(self): - """ should return the same function - """ - def func() : - pass - module = 'sys' - decorator = require_module(module) - self.assertEqual(func, decorator(func), 'module %s exists : function \ - return by the decorator should be the same.' % module) - - def test_require_module_bad(self): - """ should return a different function : skipping test - """ - def func() : - pass - modules = ('bla', 'blo', 'bli') - for module in modules: - try: - __import__(module) - pass - except ImportError: - decorator = require_module(module) - self.assertNotEqual(func, decorator(func), 'module %s does \ - not exist : function return by the decorator should \ - NOT be the same.' % module) - return - print('all modules in %s exist. 
Could not test %s' % (', '.join(modules), - sys._getframe().f_code.co_name)) - -class TagTC(TestCase): - - def setUp(self): - @tag('testing', 'bob') - def bob(a, b, c): - return (a + b) * c - - self.func = bob - - class TagTestTC(TestCase): - tags = Tags('one', 'two') - - def test_one(self): - self.assertTrue(True) - - @tag('two', 'three') - def test_two(self): - self.assertTrue(True) - - @tag('three', inherit=False) - def test_three(self): - self.assertTrue(True) - self.cls = TagTestTC - - def test_tag_decorator(self): - bob = self.func - - self.assertEqual(bob(2, 3, 7), 35) - self.assertTrue(hasattr(bob, 'tags')) - self.assertSetEqual(bob.tags, set(['testing', 'bob'])) - - def test_tags_class(self): - tags = self.func.tags - - self.assertTrue(tags['testing']) - self.assertFalse(tags['Not inside']) - - def test_tags_match(self): - tags = self.func.tags - - self.assertTrue(tags.match('testing')) - self.assertFalse(tags.match('other')) - - self.assertFalse(tags.match('testing and coin')) - self.assertTrue(tags.match('testing or other')) - - self.assertTrue(tags.match('not other')) - - self.assertTrue(tags.match('not other or (testing and bibi)')) - self.assertTrue(tags.match('other or (testing and bob)')) - - def test_tagged_class(self): - def options(tags): - class Options(object): - tags_pattern = tags - return Options() - - tc = self.cls('test_one') - - runner = SkipAwareTextTestRunner() - self.assertTrue(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertTrue(runner.does_match_tags(tc.test_three)) - - runner = SkipAwareTextTestRunner(options=options('one')) - self.assertTrue(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertFalse(runner.does_match_tags(tc.test_three)) - - runner = SkipAwareTextTestRunner(options=options('two')) - self.assertTrue(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertFalse(runner.does_match_tags(tc.test_three)) - - runner = SkipAwareTextTestRunner(options=options('three')) - self.assertFalse(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertTrue(runner.does_match_tags(tc.test_three)) - - runner = SkipAwareTextTestRunner(options=options('two or three')) - self.assertTrue(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertTrue(runner.does_match_tags(tc.test_three)) - - runner = SkipAwareTextTestRunner(options=options('two and three')) - self.assertFalse(runner.does_match_tags(tc.test_one)) - self.assertTrue(runner.does_match_tags(tc.test_two)) - self.assertFalse(runner.does_match_tags(tc.test_three)) - - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py deleted file mode 100644 index 330d49c2..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_textutils.py +++ /dev/null @@ -1,268 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" -unit tests for module textutils -squeleton generated by /home/syt/cvs_work/logilab/pyreverse/py2tests.py on Sep 08 at 09:1:31 - -""" -import doctest -import re -from os import linesep - -from logilab.common import textutils as tu -from logilab.common.testlib import TestCase, unittest_main - - -if linesep != '\n': - import re - LINE_RGX = re.compile(linesep) - def ulines(string): - return LINE_RGX.sub('\n', string) -else: - def ulines(string): - return string - -class NormalizeTextTC(TestCase): - - def test_known_values(self): - self.assertEqual(ulines(tu.normalize_text('''some really malformated - text. -With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes - -and empty lines! - ''')), - '''some really malformated text. With some times some -veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong -linnnnnnnnnnnes - -and empty lines!''') - self.assertMultiLineEqual(ulines(tu.normalize_text('''\ -some ReST formated text -======================= -With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes -and normal lines! - -another paragraph - ''', rest=True)), - '''\ -some ReST formated text -======================= -With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy -loooooooooooooooooooooong linnnnnnnnnnnes -and normal lines! - -another paragraph''') - - def test_nonregr_unsplitable_word(self): - self.assertEqual(ulines(tu.normalize_text('''petit complement : - -http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n -''', 80)), - '''petit complement : - -http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n''') - - - def test_nonregr_rest_normalize(self): - self.assertEqual(ulines(tu.normalize_text("""... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et aller discuter avec les autres si c'est utile ou necessaire. - """, rest=True)), """... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et -aller discuter avec les autres si c'est utile ou necessaire.""") - - def test_normalize_rest_paragraph(self): - self.assertEqual(ulines(tu.normalize_rest_paragraph("""**nico**: toto""")), - """**nico**: toto""") - - def test_normalize_rest_paragraph2(self): - self.assertEqual(ulines(tu.normalize_rest_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf -.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')), - """> .. _tdm: -> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf -> .. 
_extrait: -> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""") - - def test_normalize_paragraph2(self): - self.assertEqual(ulines(tu.normalize_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf -.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')), - """> .. _tdm: -> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf -> .. _extrait: -> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""") - -class NormalizeParagraphTC(TestCase): - - def test_known_values(self): - self.assertEqual(ulines(tu.normalize_text("""This package contains test files shared by the logilab-common package. It isn't -necessary to install this package unless you want to execute or look at -the tests.""", indent=' ', line_len=70)), - """\ - This package contains test files shared by the logilab-common - package. It isn't necessary to install this package unless you want - to execute or look at the tests.""") - - -class GetCsvTC(TestCase): - - def test_known(self): - self.assertEqual(tu.splitstrip('a, b,c '), ['a', 'b', 'c']) - -class UnitsTC(TestCase): - - def setUp(self): - self.units = { - 'm': 60, - 'kb': 1024, - 'mb': 1024*1024, - } - - def test_empty_base(self): - self.assertEqual(tu.apply_units('17', {}), 17) - - def test_empty_inter(self): - def inter(value): - return int(float(value)) * 2 - result = tu.apply_units('12.4', {}, inter=inter) - self.assertEqual(result, 12 * 2) - self.assertIsInstance(result, float) - - def test_empty_final(self): - # int('12.4') raise value error - self.assertRaises(ValueError, tu.apply_units, '12.4', {}, final=int) - - def test_empty_inter_final(self): - result = tu.apply_units('12.4', {}, inter=float, final=int) - self.assertEqual(result, 12) - self.assertIsInstance(result, int) - - def test_blank_base(self): - result = tu.apply_units(' 42 ', {}, final=int) - self.assertEqual(result, 42) - - def test_blank_space(self): - result = tu.apply_units(' 1 337 ', {}, final=int) - self.assertEqual(result, 1337) - - def test_blank_coma(self): - result = tu.apply_units(' 4,298.42 ', {}) - self.assertEqual(result, 4298.42) - - def test_blank_mixed(self): - result = tu.apply_units('45, 317, 337', {}, final=int) - self.assertEqual(result, 45317337) - - def test_unit_singleunit_singleletter(self): - result = tu.apply_units('15m', self.units) - self.assertEqual(result, 15 * self.units['m'] ) - - def test_unit_singleunit_multipleletter(self): - result = tu.apply_units('47KB', self.units) - self.assertEqual(result, 47 * self.units['kb'] ) - - def test_unit_singleunit_caseinsensitive(self): - result = tu.apply_units('47kb', self.units) - self.assertEqual(result, 47 * self.units['kb'] ) - - def test_unit_multipleunit(self): - result = tu.apply_units('47KB 1.5MB', self.units) - self.assertEqual(result, 47 * self.units['kb'] + 1.5 * self.units['mb']) - - def test_unit_with_blank(self): - result = tu.apply_units('1 000 KB', 
self.units) - self.assertEqual(result, 1000 * self.units['kb']) - - def test_unit_wrong_input(self): - self.assertRaises(ValueError, tu.apply_units, '', self.units) - self.assertRaises(ValueError, tu.apply_units, 'wrong input', self.units) - self.assertRaises(ValueError, tu.apply_units, 'wrong13 input', self.units) - self.assertRaises(ValueError, tu.apply_units, 'wrong input42', self.units) - -RGX = re.compile('abcd') -class PrettyMatchTC(TestCase): - - def test_known(self): - string = 'hiuherabcdef' - self.assertEqual(ulines(tu.pretty_match(RGX.search(string), string)), - 'hiuherabcdef\n ^^^^') - def test_known_values_1(self): - rgx = re.compile('(to*)') - string = 'toto' - match = rgx.search(string) - self.assertEqual(ulines(tu.pretty_match(match, string)), '''toto -^^''') - - def test_known_values_2(self): - rgx = re.compile('(to*)') - string = ''' ... ... to to - ... ... ''' - match = rgx.search(string) - self.assertEqual(ulines(tu.pretty_match(match, string)), ''' ... ... to to - ^^ - ... ...''') - - - -class UnquoteTC(TestCase): - def test(self): - self.assertEqual(tu.unquote('"toto"'), 'toto') - self.assertEqual(tu.unquote("'l'inenarrable toto'"), "l'inenarrable toto") - self.assertEqual(tu.unquote("no quote"), "no quote") - - -class ColorizeAnsiTC(TestCase): - def test_known(self): - self.assertEqual(tu.colorize_ansi('hello', 'blue', 'strike'), '\x1b[9;34mhello\x1b[0m') - self.assertEqual(tu.colorize_ansi('hello', style='strike, inverse'), '\x1b[9;7mhello\x1b[0m') - self.assertEqual(tu.colorize_ansi('hello', None, None), 'hello') - self.assertEqual(tu.colorize_ansi('hello', '', ''), 'hello') - def test_raise(self): - self.assertRaises(KeyError, tu.colorize_ansi, 'hello', 'bleu', None) - self.assertRaises(KeyError, tu.colorize_ansi, 'hello', None, 'italique') - - -class UnormalizeTC(TestCase): - def test_unormalize_no_substitute(self): - data = [(u'\u0153nologie', u'oenologie'), - (u'\u0152nologie', u'OEnologie'), - (u'l\xf8to', u'loto'), - (u'été', u'ete'), - (u'àèùéïîôêç', u'aeueiioec'), - (u'ÀÈÙÉÃÎÔÊÇ', u'AEUEIIOEC'), - (u'\xa0', u' '), # NO-BREAK SPACE managed by NFKD decomposition - (u'\u0154', u'R'), - (u'Pointe d\u2019Yves', u"Pointe d'Yves"), - (u'Bordeaux\u2013Mérignac', u'Bordeaux-Merignac'), - ] - for input, output in data: - yield self.assertEqual, tu.unormalize(input), output - - def test_unormalize_substitute(self): - self.assertEqual(tu.unormalize(u'ab \u8000 cd', substitute='_'), - 'ab _ cd') - - def test_unormalize_backward_compat(self): - self.assertRaises(ValueError, tu.unormalize, u"\u8000") - self.assertEqual(tu.unormalize(u"\u8000", substitute=''), u'') - - -def load_tests(loader, tests, ignore): - tests.addTests(doctest.DocTestSuite(tu)) - return tests - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py b/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py deleted file mode 100644 index ea5af81a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_tree.py +++ /dev/null @@ -1,247 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" -unit tests for module logilab.common.tree -squeleton generated by /home/syt/bin/py2tests on Jan 20 at 10:43:25 -""" - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.tree import * - -tree = ('root', ( - ('child_1_1', ( - ('child_2_1', ()), ('child_2_2', ( - ('child_3_1', ()), - )))), - ('child_1_2', (('child_2_3', ()),)))) - -def make_tree(tuple): - n = Node(tuple[0]) - for child in tuple[1]: - n.append(make_tree(child)) - return n - -class Node_ClassTest(TestCase): - """ a basic tree node, caracterised by an id""" - def setUp(self): - """ called before each test from this class """ - self.o = make_tree(tree) - - - def test_flatten(self): - result = [r.id for r in self.o.flatten()] - expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3'] - self.assertListEqual(result, expected) - - def test_flatten_with_outlist(self): - resultnodes = [] - self.o.flatten(resultnodes) - result = [r.id for r in resultnodes] - expected = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3'] - self.assertListEqual(result, expected) - - - def test_known_values_remove(self): - """ - remove a child node - """ - self.o.remove(self.o.get_node_by_id('child_1_1')) - self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1') - - def test_known_values_replace(self): - """ - replace a child node with another - """ - self.o.replace(self.o.get_node_by_id('child_1_1'), Node('hoho')) - self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'child_1_1') - self.assertEqual(self.o.get_node_by_id('hoho'), self.o.children[0]) - - def test_known_values_get_sibling(self): - """ - return the sibling node that has given id - """ - self.assertEqual(self.o.children[0].get_sibling('child_1_2'), self.o.children[1], None) - - def test_raise_get_sibling_NodeNotFound(self): - self.assertRaises(NodeNotFound, self.o.children[0].get_sibling, 'houhou') - - def test_known_values_get_node_by_id(self): - """ - return node in whole hierarchy that has given id - """ - self.assertEqual(self.o.get_node_by_id('child_1_1'), self.o.children[0]) - - def test_raise_get_node_by_id_NodeNotFound(self): - self.assertRaises(NodeNotFound, self.o.get_node_by_id, 'houhou') - - def test_known_values_get_child_by_id(self): - """ - return child of given id - """ - self.assertEqual(self.o.get_child_by_id('child_2_1', recurse=1), self.o.children[0].children[0]) - - def test_raise_get_child_by_id_NodeNotFound(self): - self.assertRaises(NodeNotFound, self.o.get_child_by_id, nid='child_2_1') - self.assertRaises(NodeNotFound, self.o.get_child_by_id, 'houhou') - - def test_known_values_get_child_by_path(self): - """ - return child of given path (path is a list of ids) - """ - self.assertEqual(self.o.get_child_by_path(['root', 'child_1_1', 'child_2_1']), self.o.children[0].children[0]) - - def test_raise_get_child_by_path_NodeNotFound(self): - self.assertRaises(NodeNotFound, self.o.get_child_by_path, ['child_1_1', 'child_2_11']) - - def test_known_values_depth_down(self): - """ - return depth of this node in the tree - """ - self.assertEqual(self.o.depth_down(), 4) - 
self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth_down(), 1) - - def test_known_values_depth(self): - """ - return depth of this node in the tree - """ - self.assertEqual(self.o.depth(), 0) - self.assertEqual(self.o.get_child_by_id('child_2_1', True).depth(), 2) - - def test_known_values_width(self): - """ - return depth of this node in the tree - """ - self.assertEqual(self.o.width(), 3) - self.assertEqual(self.o.get_child_by_id('child_2_1', True).width(), 1) - - def test_known_values_root(self): - """ - return the root node of the tree - """ - self.assertEqual(self.o.get_child_by_id('child_2_1', True).root(), self.o) - - def test_known_values_leaves(self): - """ - return a list with all the leaf nodes descendant from this task - """ - self.assertEqual(self.o.leaves(), [self.o.get_child_by_id('child_2_1', True), - self.o.get_child_by_id('child_3_1', True), - self.o.get_child_by_id('child_2_3', True)]) - - def test_known_values_lineage(self): - c31 = self.o.get_child_by_id('child_3_1', True) - self.assertEqual(c31.lineage(), [self.o.get_child_by_id('child_3_1', True), - self.o.get_child_by_id('child_2_2', True), - self.o.get_child_by_id('child_1_1', True), - self.o]) - - -class post_order_list_FunctionTest(TestCase): - """""" - def setUp(self): - """ called before each test from this class """ - self.o = make_tree(tree) - - def test_known_values_post_order_list(self): - """ - create a list with tree nodes for which the function returned true - in a post order foashion - """ - L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root'] - l = [n.id for n in post_order_list(self.o)] - self.assertEqual(l, L, l) - - def test_known_values_post_order_list2(self): - """ - create a list with tree nodes for which the function returned true - in a post order foashion - """ - def filter(node): - if node.id == 'child_2_2': - return 0 - return 1 - L = ['child_2_1', 'child_1_1', 'child_2_3', 'child_1_2', 'root'] - l = [n.id for n in post_order_list(self.o, filter)] - self.assertEqual(l, L, l) - - -class PostfixedDepthFirstIterator_ClassTest(TestCase): - """""" - def setUp(self): - """ called before each test from this class """ - self.o = make_tree(tree) - - def test_known_values_next(self): - L = ['child_2_1', 'child_3_1', 'child_2_2', 'child_1_1', 'child_2_3', 'child_1_2', 'root'] - iter = PostfixedDepthFirstIterator(self.o) - o = next(iter) - i = 0 - while o: - self.assertEqual(o.id, L[i]) - o = next(iter) - i += 1 - - -class pre_order_list_FunctionTest(TestCase): - """""" - def setUp(self): - """ called before each test from this class """ - self.o = make_tree(tree) - - def test_known_values_pre_order_list(self): - """ - create a list with tree nodes for which the function returned true - in a pre order fashion - """ - L = ['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3'] - l = [n.id for n in pre_order_list(self.o)] - self.assertEqual(l, L, l) - - def test_known_values_pre_order_list2(self): - """ - create a list with tree nodes for which the function returned true - in a pre order fashion - """ - def filter(node): - if node.id == 'child_2_2': - return 0 - return 1 - L = ['root', 'child_1_1', 'child_2_1', 'child_1_2', 'child_2_3'] - l = [n.id for n in pre_order_list(self.o, filter)] - self.assertEqual(l, L, l) - - -class PrefixedDepthFirstIterator_ClassTest(TestCase): - """""" - def setUp(self): - """ called before each test from this class """ - self.o = make_tree(tree) - - def test_known_values_next(self): - L = 
['root', 'child_1_1', 'child_2_1', 'child_2_2', 'child_3_1', 'child_1_2', 'child_2_3'] - iter = PrefixedDepthFirstIterator(self.o) - o = next(iter) - i = 0 - while o: - self.assertEqual(o.id, L[i]) - o = next(iter) - i += 1 - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py b/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py deleted file mode 100644 index 2841172a..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_umessage.py +++ /dev/null @@ -1,94 +0,0 @@ -# encoding: iso-8859-15 -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -import sys -import email -from os.path import join, dirname, abspath - -from six import text_type - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.umessage import UMessage, decode_QP, message_from_string - -DATA = join(dirname(abspath(__file__)), 'data') - -class UMessageTC(TestCase): - - def setUp(self): - if sys.version_info >= (3, 2): - import io - msg1 = email.message_from_file(io.open(join(DATA, 'test1.msg'), encoding='utf8')) - msg2 = email.message_from_file(io.open(join(DATA, 'test2.msg'), encoding='utf8')) - else: - msg1 = email.message_from_file(open(join(DATA, 'test1.msg'))) - msg2 = email.message_from_file(open(join(DATA, 'test2.msg'))) - self.umessage1 = UMessage(msg1) - self.umessage2 = UMessage(msg2) - - def test_get_subject(self): - subj = self.umessage2.get('Subject') - self.assertEqual(type(subj), text_type) - self.assertEqual(subj, u'À LA MER') - - def test_get_all(self): - to = self.umessage2.get_all('To') - self.assertEqual(type(to[0]), text_type) - self.assertEqual(to, [u'élément à accents ']) - - def test_get_payload_no_multi(self): - payload = self.umessage1.get_payload() - self.assertEqual(type(payload), text_type) - - def test_get_payload_decode(self): - msg = """\ -MIME-Version: 1.0 -Content-Type: text/plain; charset="utf-8" -Content-Transfer-Encoding: base64 -Subject: =?utf-8?q?b=C3=AFjour?= -From: =?utf-8?q?oim?= -Reply-to: =?utf-8?q?oim?= , =?utf-8?q?BimBam?= -X-CW: data -To: test@logilab.fr -Date: now - -dW4gcGV0aXQgY8O2dWNvdQ== -""" - msg = message_from_string(msg) - self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou') - - def test_decode_QP(self): - test_line = '=??b?UmFwaGHrbA==?= DUPONT' - test = decode_QP(test_line) - self.assertEqual(type(test), text_type) - self.assertEqual(test, u'Raphaël DUPONT') - - def test_decode_QP_utf8(self): - test_line = '=?utf-8?q?o=C3=AEm?= ' - test = decode_QP(test_line) - self.assertEqual(type(test), text_type) - self.assertEqual(test, u'oîm ') - - def test_decode_QP_ascii(self): - test_line = 'test ' - test = decode_QP(test_line) - self.assertEqual(type(test), text_type) - self.assertEqual(test, 
u'test ') - - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py deleted file mode 100644 index 2298eec7..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_html.py +++ /dev/null @@ -1,63 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -'''unit tests for ureports.html_writer -''' - - -from utils import WriterTC -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.ureports.html_writer import * - -class HTMLWriterTC(TestCase, WriterTC): - - def setUp(self): - self.writer = HTMLWriter(1) - - # Section tests ########################################################### - section_base = '''
[unrecoverable extraction residue: the deleted lines here were the HTMLWriterTC expected-output string literals
(section_base, section_nested, list_base, nested_list, table_base, field_table, advanced_table, verbatim_base);
their HTML markup was stripped when this patch was rendered to text, leaving only the literal text content
("Section title", "Section's description. Blabla bla", "Subsection", item1-item4, head1/head2, cell1/cell2,
f1/v1, f22/v22, f333/v333, "toi perdu ?", "blablabla")]
    ''' - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py b/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py deleted file mode 100644 index dd39dd84..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_ureports_text.py +++ /dev/null @@ -1,104 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -'''unit tests for ureports.text_writer -''' - - -from utils import WriterTC -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.ureports.text_writer import TextWriter - -class TextWriterTC(TestCase, WriterTC): - def setUp(self): - self.writer = TextWriter() - - # Section tests ########################################################### - section_base = ''' -Section title -============= -Section\'s description. -Blabla bla - -''' - section_nested = ''' -Section title -============= -Section\'s description. -Blabla bla - -Subsection ----------- -Sub section description - - -''' - - # List tests ############################################################## - list_base = ''' -* item1 -* item2 -* item3 -* item4''' - - nested_list = ''' -* blabla - - 1 - - 2 - - 3 - -* an other point''' - - # Table tests ############################################################# - table_base = ''' -+------+------+ -|head1 |head2 | -+------+------+ -|cell1 |cell2 | -+------+------+ - -''' - field_table = ''' -f1 : v1 -f22 : v22 -f333: v333 -''' - advanced_table = ''' -+---------------+------+ -|field |value | -+===============+======+ -|f1 |v1 | -+---------------+------+ -|f22 |v22 | -+---------------+------+ -|f333 |v333 | -+---------------+------+ -|`toi perdu ?`_ | | -+---------------+------+ - -''' - - - # VerbatimText tests ###################################################### - verbatim_base = ''':: - - blablabla - -''' - -if __name__ == '__main__': - unittest_main() diff --git a/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py b/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py deleted file mode 100644 index 3d82da93..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/unittest_xmlutils.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . - -from logilab.common.testlib import TestCase, unittest_main -from logilab.common.xmlutils import parse_pi_data - - -class ProcessingInstructionDataParsingTest(TestCase): - def test_empty_pi(self): - """ - Tests the parsing of the data of an empty processing instruction. - """ - pi_data = u" \t \n " - data = parse_pi_data(pi_data) - self.assertEqual(data, {}) - - def test_simple_pi_with_double_quotes(self): - """ - Tests the parsing of the data of a simple processing instruction using - double quotes for embedding the value. - """ - pi_data = u""" \t att="value"\n """ - data = parse_pi_data(pi_data) - self.assertEqual(data, {u"att": u"value"}) - - def test_simple_pi_with_simple_quotes(self): - """ - Tests the parsing of the data of a simple processing instruction using - simple quotes for embedding the value. - """ - pi_data = u""" \t att='value'\n """ - data = parse_pi_data(pi_data) - self.assertEqual(data, {u"att": u"value"}) - - def test_complex_pi_with_different_quotes(self): - """ - Tests the parsing of the data of a complex processing instruction using - simple quotes or double quotes for embedding the values. - """ - pi_data = u""" \t att='value'\n att2="value2" att3='value3'""" - data = parse_pi_data(pi_data) - self.assertEqual(data, {u"att": u"value", u"att2": u"value2", - u"att3": u"value3"}) - - def test_pi_with_non_attribute_data(self): - """ - Tests the parsing of the data of a complex processing instruction - containing non-attribute data. - """ - pi_data = u""" \t keyword att1="value1" """ - data = parse_pi_data(pi_data) - self.assertEqual(data, {u"keyword": None, u"att1": u"value1"}) - - -# definitions for automatic unit testing - -if __name__ == '__main__': - unittest_main() - diff --git a/pymode/libs/logilab-common-1.4.1/test/utils.py b/pymode/libs/logilab-common-1.4.1/test/utils.py deleted file mode 100644 index ca1730eb..00000000 --- a/pymode/libs/logilab-common-1.4.1/test/utils.py +++ /dev/null @@ -1,96 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-'''unit tests utilities for ureports -''' - -from __future__ import print_function - -import sys -from io import StringIO -buffers = [StringIO] -if sys.version_info < (3, 0): - from cStringIO import StringIO as cStringIO - from StringIO import StringIO as pStringIO - buffers += [cStringIO, pStringIO] - -from logilab.common.ureports.nodes import * - -class WriterTC: - def _test_output(self, test_id, layout, msg=None): - for buffercls in buffers: - buffer = buffercls() - self.writer.format(layout, buffer) - got = buffer.getvalue() - expected = getattr(self, test_id) - try: - self.assertMultiLineEqual(got, expected) - except: - print('**** using a %s' % buffer.__class__) - print('**** got for %s' % test_id) - print(got) - print('**** while expected') - print(expected) - print('****') - raise - - def test_section(self): - layout = Section('Section title', - 'Section\'s description.\nBlabla bla') - self._test_output('section_base', layout) - layout.append(Section('Subsection', 'Sub section description')) - self._test_output('section_nested', layout) - - def test_verbatim(self): - layout = VerbatimText('blablabla') - self._test_output('verbatim_base', layout) - - - def test_list(self): - layout = List(children=('item1', 'item2', 'item3', 'item4')) - self._test_output('list_base', layout) - - def test_nested_list(self): - layout = List(children=(Paragraph(("blabla", List(children=('1', "2", "3")))), - "an other point")) - self._test_output('nested_list', layout) - - - def test_table(self): - layout = Table(cols=2, children=('head1', 'head2', 'cell1', 'cell2')) - self._test_output('table_base', layout) - - def test_field_table(self): - table = Table(cols=2, klass='field', id='mytable') - for field, value in (('f1', 'v1'), ('f22', 'v22'), ('f333', 'v333')): - table.append(Text(field)) - table.append(Text(value)) - self._test_output('field_table', table) - - def test_advanced_table(self): - table = Table(cols=2, klass='whatever', id='mytable', rheaders=1) - for field, value in (('field', 'value'), ('f1', 'v1'), ('f22', 'v22'), ('f333', 'v333')): - table.append(Text(field)) - table.append(Text(value)) - table.append(Link('http://www.perdu.com', 'toi perdu ?')) - table.append(Text('')) - self._test_output('advanced_table', table) - - -## def test_image(self): -## layout = Verbatim('blablabla') -## self._test_output('verbatim_base', layout) From e253ddfbe316f27f0329576f509ca2ebe8732d24 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 13:11:51 -0300 Subject: [PATCH 07/81] Remove pkg_resources hardcoded dep lib --- pymode/libs/pkg_resources/__init__.py | 3146 ----------------- pymode/libs/pkg_resources/_vendor/__init__.py | 0 .../_vendor/packaging/__about__.py | 31 - .../_vendor/packaging/__init__.py | 24 - .../_vendor/packaging/_compat.py | 40 - .../_vendor/packaging/_structures.py | 78 - .../_vendor/packaging/specifiers.py | 784 ---- .../_vendor/packaging/version.py | 403 --- 8 files changed, 4506 deletions(-) delete mode 100644 pymode/libs/pkg_resources/__init__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/__init__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/__about__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/__init__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_compat.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_structures.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/specifiers.py delete mode 100644 
pymode/libs/pkg_resources/_vendor/packaging/version.py diff --git a/pymode/libs/pkg_resources/__init__.py b/pymode/libs/pkg_resources/__init__.py deleted file mode 100644 index e6201c88..00000000 --- a/pymode/libs/pkg_resources/__init__.py +++ /dev/null @@ -1,3146 +0,0 @@ -""" -Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. -""" - -from __future__ import absolute_import - -import sys -import os -import io -import time -import re -import types -import zipfile -import zipimport -import warnings -import stat -import functools -import pkgutil -import token -import symbol -import operator -import platform -import collections -import plistlib -import email.parser -import tempfile -import textwrap -from pkgutil import get_importer - -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp - -PY3 = sys.version_info > (3,) -PY2 = not PY3 - -if PY3: - from urllib.parse import urlparse, urlunparse - -if PY2: - from urlparse import urlparse, urlunparse - -if PY3: - string_types = str, -else: - string_types = str, eval('unicode') - -iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems() - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -# Avoid try/except due to potential problems with delayed import mechanisms. -if sys.version_info >= (3, 3) and sys.implementation.name == "cpython": - import importlib.machinery as importlib_machinery -else: - importlib_machinery = None - -try: - import parser -except ImportError: - pass - -try: - import pkg_resources._vendor.packaging.version - import pkg_resources._vendor.packaging.specifiers - packaging = pkg_resources._vendor.packaging -except ImportError: - # fallback to naturally-installed version; allows system packagers to - # omit vendored packages. - import packaging.version - import packaging.specifiers - - -# declare some globals that will be defined later to -# satisfy the linters. -require = None -working_set = None - - -class PEP440Warning(RuntimeWarning): - """ - Used when there is an issue with a version or specifier not complying with - PEP 440. 
- """ - - -class _SetuptoolsVersionMixin(object): - - def __hash__(self): - return super(_SetuptoolsVersionMixin, self).__hash__() - - def __lt__(self, other): - if isinstance(other, tuple): - return tuple(self) < other - else: - return super(_SetuptoolsVersionMixin, self).__lt__(other) - - def __le__(self, other): - if isinstance(other, tuple): - return tuple(self) <= other - else: - return super(_SetuptoolsVersionMixin, self).__le__(other) - - def __eq__(self, other): - if isinstance(other, tuple): - return tuple(self) == other - else: - return super(_SetuptoolsVersionMixin, self).__eq__(other) - - def __ge__(self, other): - if isinstance(other, tuple): - return tuple(self) >= other - else: - return super(_SetuptoolsVersionMixin, self).__ge__(other) - - def __gt__(self, other): - if isinstance(other, tuple): - return tuple(self) > other - else: - return super(_SetuptoolsVersionMixin, self).__gt__(other) - - def __ne__(self, other): - if isinstance(other, tuple): - return tuple(self) != other - else: - return super(_SetuptoolsVersionMixin, self).__ne__(other) - - def __getitem__(self, key): - return tuple(self)[key] - - def __iter__(self): - component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) - replace = { - 'pre': 'c', - 'preview': 'c', - '-': 'final-', - 'rc': 'c', - 'dev': '@', - }.get - - def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part, part) - if not part or part == '.': - continue - if part[:1] in '0123456789': - # pad for numeric comparison - yield part.zfill(8) - else: - yield '*'+part - - # ensure that alpha/beta/candidate are before final - yield '*final' - - def old_parse_version(s): - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - # remove '-' before a prerelease tag - if part < '*final': - while parts and parts[-1] == '*final-': - parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == '00000000': - parts.pop() - parts.append(part) - return tuple(parts) - - # Warn for use of this function - warnings.warn( - "You have iterated over the result of " - "pkg_resources.parse_version. This is a legacy behavior which is " - "inconsistent with the new version class introduced in setuptools " - "8.0. In most cases, conversion to a tuple is unnecessary. For " - "comparison of versions, sort the Version instances directly. 
If " - "you have another use case requiring the tuple, please file a " - "bug with the setuptools project describing that need.", - RuntimeWarning, - stacklevel=1, - ) - - for part in old_parse_version(str(self)): - yield part - - -class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): - pass - - -class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, - packaging.version.LegacyVersion): - pass - - -def parse_version(v): - try: - return SetuptoolsVersion(v) - except packaging.version.InvalidVersion: - return SetuptoolsLegacyVersion(v) - - -_state_vars = {} - -def _declare_state(vartype, **kw): - globals().update(kw) - _state_vars.update(dict.fromkeys(kw, vartype)) - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.items(): - state[k] = g['_sget_'+v](g[k]) - return state - -def __setstate__(state): - g = globals() - for k, v in state.items(): - g['_sset_'+_state_vars[k]](k, g[k], v) - return state - -def _sget_dict(val): - return val.copy() - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - -def _sget_object(val): - return val.__getstate__() - -def _sset_object(key, ob, state): - ob.__setstate__(state) - -_sget_none = _sset_none = lambda *args: None - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform() - m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - # not Mac OS X - pass - return plat - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', - 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError', 'VersionConflict', 'DistributionNotFound', - 'UnknownExtra', 'ExtractionError', - - # Warnings - 'PEP440Warning', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] - -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - - -class VersionConflict(ResolutionError): - """ - An already-installed version conflicts with the requested version. - - Should be initialized with the installed Distribution and the requested - Requirement. - """ - - _template = "{self.dist} is installed but {self.req} is required" - - @property - def dist(self): - return self.args[0] - - @property - def req(self): - return self.args[1] - - def report(self): - return self._template.format(**locals()) - - def with_context(self, required_by): - """ - If required_by is non-empty, return a version of self that is a - ContextualVersionConflict. - """ - if not required_by: - return self - args = self.args + (required_by,) - return ContextualVersionConflict(*args) - - -class ContextualVersionConflict(VersionConflict): - """ - A VersionConflict that accepts a third parameter, the set of the - requirements that required the installed Distribution. 
- """ - - _template = VersionConflict._template + ' by {self.required_by}' - - @property - def required_by(self): - return self.args[2] - - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") - - @property - def req(self): - return self.args[0] - - @property - def requirers(self): - return self.args[1] - - @property - def requirers_str(self): - if not self.requirers: - return 'the application' - return ', '.join(self.requirers) - - def report(self): - return self._template.format(**locals()) - - def __str__(self): - return self.report() - - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. - """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq, Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - try: - # Python 2.7 or >=3.2 - from sysconfig import get_platform - except ImportError: - from distutils.util import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -# XXX backward compat -get_platform = get_build_platform - - -def compatible_platforms(provided, required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. 
- - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - # easy case - return True - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - return True - # egg isn't macosx or legacy darwin - return False - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - # is the required OS major update >= the provided one? - if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -# backward compatibility -run_main = run_script - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist, string_types): - dist = Requirement.parse(dist) - if isinstance(dist, Requirement): - dist = get_provider(dist) - if not isinstance(dist, Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? (like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - @classmethod - def _build_master(cls): - """ - Prepare the master working set. - """ - ws = cls() - try: - from __main__ import __requires__ - except ImportError: - # The main program does not list any requirements - return ws - - # ensure the requirements are met - try: - ws.require(__requires__) - except VersionConflict: - return cls._build_from_requirements(__requires__) - - return ws - - @classmethod - def _build_from_requirements(cls, req_spec): - """ - Build a working set from a requirement spec. Rewrites sys.path. - """ - # try it without defaults already on sys.path - # by starting with an empty path - ws = cls([]) - reqs = parse_requirements(req_spec) - dists = ws.resolve(reqs, Environment()) - for dist in dists: - ws.add(dist) - - # add any missing entries from sys.path - for entry in sys.path: - if entry not in ws.entries: - ws.add_entry(entry) - - # then copy back to sys.path - sys.path[:] = ws.entries - return ws - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - def __contains__(self, dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. 
But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - # XXX add more info - raise VersionConflict(dist, req) - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - if item not in self.entry_keys: - # workaround a cache issue - continue - - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True, replace=False): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set, unless `replace=True`. - If it's added, any callbacks registered with the ``subscribe()`` method - will be called. - """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if not replace and dist.key in self.by_key: - # ignore hidden distros - return - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, - replace_conflicting=False): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - - Unless `replace_conflicting=True`, raises a VersionConflict exception if - any requirements are found on the path that have the correct name but - the wrong version. Otherwise, if an `installer` is supplied it will be - invoked to obtain the correct version of the requirement and activate - it. 
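# --- Editor's illustrative sketch (not part of the vendored file above) ---
# A minimal use of the resolve() API described in the docstring: build a
# WorkingSet from sys.path and resolve a requirement. "setuptools" is only
# an example name; any installed project works.
import pkg_resources

ws = pkg_resources.WorkingSet()                      # snapshot of sys.path
reqs = pkg_resources.parse_requirements(["setuptools"])
try:
    needed = ws.resolve(reqs)                        # Distributions to activate
    print([str(dist) for dist in needed])
except pkg_resources.DistributionNotFound as exc:
    print("missing:", exc)
except pkg_resources.VersionConflict as exc:
    print("conflict:", exc)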
- """ - - # set up the stack - requirements = list(requirements)[::-1] - # set of processed requirements - processed = {} - # key -> dist - best = {} - to_activate = [] - - # Mapping of requirement to set of distributions that required it; - # useful for reporting info about conflicts. - required_by = collections.defaultdict(set) - - while requirements: - # process dependencies breadth-first - req = requirements.pop(0) - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match(req, ws, installer) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) - - # push the new requirements onto the stack - new_requirements = dist.requires(req.extras)[::-1] - requirements.extend(new_requirements) - - # Register the new requirements needed by req - for new_requirement in new_requirements: - required_by[new_requirement].add(req.project_name) - - processed[req] = True - - # return list of distros to activate - return to_activate - - def find_plugins(self, plugin_env, full_env=None, installer=None, - fallback=True): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - # add plugins+libs to sys.path - map(working_set.add, distributions) - # display errors - print('Could not load', errors) - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. 
- """ - - plugin_projects = list(plugin_env) - # scan project names in alphabetic order - plugin_projects.sort() - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - # put all our entries in shadow_set - list(map(shadow_set.add, self)) - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError as v: - # save error info - error_info[dist] = v - if fallback: - # try the next older version of project - continue - else: - # give up on this project, keep going - break - - else: - list(map(shadow_set.add, resolvees)) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'3.3'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. 
- """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform, self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self, project_name): - """Return a newest-to-oldest list of distributions for `project_name` - - Uses case-insensitive `project_name` comparison, assuming all the - project's distributions use their project's name converted to all - lowercase as their key. - - """ - distribution_key = project_name.lower() - return self._distmap.get(distribution_key, []) - - def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key, []) - if dist not in dists: - dists.append(dist) - dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - # try to download/install - return self.obtain(req, installer) - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. 
This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: - yield key - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other, Distribution): - self.add(other) - elif isinstance(other, Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -# XXX backward compatibility -AvailableDistributions = Environment - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. 
-""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self._warn_unsafe_extraction_path(extract_path) - - self.cached_files[target_path] = 1 - return target_path - - @staticmethod - def _warn_unsafe_extraction_path(path): - """ - If the default extraction path is overridden and set to an insecure - location, such as /tmp, it opens up an opportunity for an attacker to - replace an extracted file with an unauthorized payload. Warn the user - if a known insecure location is used. - - See Distribute #375 for more details. - """ - if os.name == 'nt' and not path.startswith(os.environ['windir']): - # On Windows, permissions are generally restrictive by default - # and temp directories are not writable by other users, so - # bypass the warning. - return - mode = os.stat(path).st_mode - if mode & stat.S_IWOTH or mode & stat.S_IWGRP: - msg = ("%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " - "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path) - warnings.warn(msg, UserWarning) - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 - os.chmod(tempname, mode) - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. 
There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - # XXX this may be locale-specific! - app_data = 'Application Data' - app_homes = [ - # best option, should be locale-safe - (('APPDATA',), None), - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - # 95/98/ME - (('WINDIR',), app_data), - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname, subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE enviroment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """ - Convert an arbitrary string to a standard version string - """ - try: - # normalize the version - return str(packaging.version.Version(version)) - except packaging.version.InvalidVersion: - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
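# --- Editor's illustrative sketch (not part of the vendored file above) ---
# The name/version normalizers defined above, in action.
from pkg_resources import safe_name, safe_version, safe_extra, to_filename

print(safe_name("My Project"))      # runs of non-alphanumeric/'.' become '-': 'My-Project'
print(safe_version("1.0 beta"))     # falls back to best-effort normalization for non PEP 440 input
print(safe_extra("Extra Feature"))  # lowercased, non-alphanumerics become '_': 'extra_feature'
print(to_filename("My-Project"))    # '-' becomes '_': 'My_Project'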
- """ - return name.replace('-','_') - - -class MarkerEvaluation(object): - values = { - 'os_name': lambda: os.name, - 'sys_platform': lambda: sys.platform, - 'python_full_version': platform.python_version, - 'python_version': lambda: platform.python_version()[:3], - 'platform_version': platform.version, - 'platform_machine': platform.machine, - 'python_implementation': platform.python_implementation, - } - - @classmethod - def is_invalid_marker(cls, text): - """ - Validate text as a PEP 426 environment marker; return an exception - if invalid or False otherwise. - """ - try: - cls.evaluate_marker(text) - except SyntaxError as e: - return cls.normalize_exception(e) - return False - - @staticmethod - def normalize_exception(exc): - """ - Given a SyntaxError from a marker evaluation, normalize the error - message: - - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error - messages. - """ - subs = { - 'unexpected EOF while parsing': 'invalid syntax', - 'parenthesis is never closed': 'invalid syntax', - } - exc.filename = None - exc.lineno = None - exc.msg = subs.get(exc.msg, exc.msg) - return exc - - @classmethod - def and_test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - items = [ - cls.interpret(nodelist[i]) - for i in range(1, len(nodelist), 2) - ] - return functools.reduce(operator.and_, items) - - @classmethod - def test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - items = [ - cls.interpret(nodelist[i]) - for i in range(1, len(nodelist), 2) - ] - return functools.reduce(operator.or_, items) - - @classmethod - def atom(cls, nodelist): - t = nodelist[1][0] - if t == token.LPAR: - if nodelist[2][0] == token.RPAR: - raise SyntaxError("Empty parentheses") - return cls.interpret(nodelist[2]) - msg = "Language feature not supported in environment markers" - raise SyntaxError(msg) - - @classmethod - def comparison(cls, nodelist): - if len(nodelist) > 4: - msg = "Chained comparison not allowed in environment markers" - raise SyntaxError(msg) - comp = nodelist[2][1] - cop = comp[1] - if comp[0] == token.NAME: - if len(nodelist[2]) == 3: - if cop == 'not': - cop = 'not in' - else: - cop = 'is not' - try: - cop = cls.get_op(cop) - except KeyError: - msg = repr(cop) + " operator not allowed in environment markers" - raise SyntaxError(msg) - return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) - - @classmethod - def get_op(cls, op): - ops = { - symbol.test: cls.test, - symbol.and_test: cls.and_test, - symbol.atom: cls.atom, - symbol.comparison: cls.comparison, - 'not in': lambda x, y: x not in y, - 'in': lambda x, y: x in y, - '==': operator.eq, - '!=': operator.ne, - '<': operator.lt, - '>': operator.gt, - '<=': operator.le, - '>=': operator.ge, - } - if hasattr(symbol, 'or_test'): - ops[symbol.or_test] = cls.test - return ops[op] - - @classmethod - def evaluate_marker(cls, text, extra=None): - """ - Evaluate a PEP 426 environment marker on CPython 2.4+. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'parser' module, which is not implemented - on - Jython and has been superseded by the 'ast' module in Python 2.6 and - later. - """ - return cls.interpret(parser.expr(text).totuple(1)[1]) - - @classmethod - def _markerlib_evaluate(cls, text): - """ - Evaluate a PEP 426 environment marker using markerlib. 
- Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - """ - import _markerlib - # markerlib implements Metadata 1.2 (PEP 345) environment markers. - # Translate the variables to Metadata 2.0 (PEP 426). - env = _markerlib.default_environment() - for key in env.keys(): - new_key = key.replace('.', '_') - env[new_key] = env.pop(key) - try: - result = _markerlib.interpret(text, env) - except NameError as e: - raise SyntaxError(e.args[0]) - return result - - if 'parser' not in globals(): - # Fall back to less-complete _markerlib implementation if 'parser' module - # is not available. - evaluate_marker = _markerlib_evaluate - - @classmethod - def interpret(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - try: - op = cls.get_op(nodelist[0]) - except KeyError: - raise SyntaxError("Comparison or logical expression expected") - return op(nodelist) - - @classmethod - def evaluate(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - kind = nodelist[0] - name = nodelist[1] - if kind==token.NAME: - try: - op = cls.values[name] - except KeyError: - raise SyntaxError("Unknown name %r" % name) - return op() - if kind==token.STRING: - s = nodelist[1] - if not cls._safe_string(s): - raise SyntaxError( - "Only plain strings allowed in environment markers") - return s[1:-1] - msg = "Language feature not supported in environment markers" - raise SyntaxError(msg) - - @staticmethod - def _safe_string(cand): - return ( - cand[:1] in "'\"" and - not cand.startswith('"""') and - not cand.startswith("'''") and - '\\' not in cand - ) - -invalid_marker = MarkerEvaluation.is_invalid_marker -evaluate_marker = MarkerEvaluation.evaluate_marker - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return io.BytesIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info, name)) - - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info, name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info, name)).decode("utf-8") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self, resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self, name): - return self.egg_info and self._isdir(self._fn(self.egg_info, name)) - - def resource_listdir(self, resource_name): - return self._listdir(self._fn(self.module_path, resource_name)) - - def metadata_listdir(self, name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info, name)) - return [] - - def run_script(self, script_name, namespace): - script = 'scripts/'+script_name - if not 
self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n', '\n') - script_text = script_text.replace('\r', '\n') - script_filename = self._fn(self.egg_info, script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - source = open(script_filename).read() - code = compile(source, script_filename, 'exec') - exec(code, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text, script_filename,'exec') - exec(script_code, namespace, namespace) - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self, module): - NullProvider.__init__(self, module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self, path): - return os.path.isdir(path) - - def _listdir(self, path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - with open(path, 'rb') as stream: - return stream.read() - -register_loader_type(type(None), DefaultProvider) - -if importlib_machinery is not None: - register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider) - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - _isdir = _has = lambda self, path: False - _get = lambda self, path: '' - _listdir = lambda self, path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - -class ZipManifests(dict): - """ - zip manifest builder - """ - - @classmethod - def build(cls, path): - """ - Build a dictionary similar to the zipimport directory - caches, except instead of tuples, store ZipInfo objects. - - Use a platform-specific path separator (os.sep) for the path keys - for compatibility with pypy on Windows. 
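# --- Editor's illustrative sketch (not part of the vendored file above) ---
# The manifest-building technique described in the docstring, written against
# the stdlib directly; "dist.egg" is a placeholder for any zip archive.
import os
import zipfile

def build_manifest(path):
    # Map each member name (with os.sep separators) to its ZipInfo record.
    with zipfile.ZipFile(path) as zfile:
        return {
            name.replace("/", os.sep): zfile.getinfo(name)
            for name in zfile.namelist()
        }

# manifest = build_manifest("dist.egg")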
- """ - with ContextualZipFile(path) as zfile: - items = ( - ( - name.replace('/', os.sep), - zfile.getinfo(name), - ) - for name in zfile.namelist() - ) - return dict(items) - - load = build - - -class MemoizedZipManifests(ZipManifests): - """ - Memoized zipfile manifests. - """ - manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') - - def load(self, path): - """ - Load a manifest at path or return a suitable manifest already loaded. - """ - path = os.path.normpath(path) - mtime = os.stat(path).st_mtime - - if path not in self or self[path].mtime != mtime: - manifest = self.build(path) - self[path] = self.manifest_mod(manifest, mtime) - - return self[path].manifest - - -class ContextualZipFile(zipfile.ZipFile): - """ - Supplement ZipFile class to support context manager for Python 2.6 - """ - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - def __new__(cls, *args, **kwargs): - """ - Construct a ZipFile or ContextualZipFile as appropriate - """ - if hasattr(zipfile.ZipFile, '__exit__'): - return zipfile.ZipFile(*args, **kwargs) - return super(ContextualZipFile, cls).__new__(cls) - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - _zip_manifests = MemoizedZipManifests() - - def __init__(self, module): - EggProvider.__init__(self, module) - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) - - def _parts(self, zip_path): - # Convert a zipfile subpath into an egg-relative path part list. 
- # pseudo-fs path - fspath = self.zip_pre+zip_path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) - - @property - def zipinfo(self): - return self._zip_manifests.load(self.loader.archive) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - @staticmethod - def _get_date_and_size(zip_stat): - size = zip_stat.file_size - # ymdhms+wday, yday, dst - date_time = zip_stat.date_time + (0, 0, -1) - # 1980 offset already done - timestamp = time.mktime(date_time) - return timestamp, size - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - # return the extracted directory name - return os.path.dirname(last) - - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if self._is_current(real_path, zip_path): - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp, timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - if self._is_current(real_path, zip_path): - # the file became current since it was checked above, - # so proceed. 
- return real_path - # Windows, del old file and retry - elif os.name=='nt': - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - # report a user-friendly error - manager.extraction_error() - - return real_path - - def _is_current(self, file_path, zip_path): - """ - Return True if the file_path is current for this zip_path - """ - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - if not os.path.isfile(file_path): - return False - stat = os.stat(file_path) - if stat.st_size!=size or stat.st_mtime!=timestamp: - return False - # check that the contents match - zip_contents = self.loader.get_data(zip_path) - with open(file_path, 'rb') as f: - file_contents = f.read() - return zip_contents == file_contents - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self, fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self, fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.egg_root, resource_name)) - - def _resource_to_zip(self, resource_name): - return self._zipinfo_name(self._fn(self.module_path, resource_name)) - -register_loader_type(zipimport.zipimporter, ZipProvider) - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self, path): - self.path = path - - def has_metadata(self, name): - return name=='PKG-INFO' - - def get_metadata(self, name): - if name=='PKG-INFO': - with open(self.path,'rU') as f: - metadata = f.read() - return metadata - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir, project_name=dist_name, metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - -_declare_state('dict', _distribution_finders = {}) - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_eggs_in_zip(importer, path_item, only=False): - """ - Find eggs in zip files; possibly multiple nested eggs. 
- """ - if importer.archive.endswith('.whl'): - # wheels are not supported with this finder - # they don't have PKG-INFO metadata, and won't ever contain eggs - return - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - # don't yield nested distros - return - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_eggs_in_zip) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object, find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - ) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info') or lower.endswith('.dist-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item, entry, metadata, precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - dists = find_distributions(os.path.join(path_item, entry)) - for dist in dists: - yield dist - elif not only and lower.endswith('.egg-link'): - with open(os.path.join(path_item, entry)) as entry_file: - entry_lines = entry_file.readlines() - for line in entry_lines: - if not line.strip(): - continue - path = os.path.join(path_item, line.rstrip()) - dists = find_distributions(path) - for item in dists: - yield item - break -register_finder(pkgutil.ImpImporter, find_on_path) - -if importlib_machinery is not None: - register_finder(importlib_machinery.FileFinder, find_on_path) - -_declare_state('dict', _namespace_handlers={}) -_declare_state('dict', _namespace_packages={}) - - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer, path_entry, moduleName, module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - - importer = get_importer(path_item) - if importer is None: - return None - - if PY3: - # For python: - # * since python 3.3, the `imp.find_module()` is deprecated - # * `importlib.util.find_spec()` is new in python 3.4 - # - # For vim: - # * if it compiled with python 3.7 or newer, then the module 'vim' will - # use `find_spec` instead of `find_module` and `load_module`. - # * if it compiled with feature `+python3/dyn`, vim can load a python - # dynamically. The loaded python maybe has a different version compared - # with the python used when compiling vim. - # - # As these reason, it's hard to say we should use `find_spec` or - # `find_module`, so here use try/except for the sake of clearness of - # the logic. - # - # As `find_module` is deprecated for newer python, we try `find_spec` - # first. - try: - spec = importer.find_spec(packageName) - if spec: - loader = spec.loader - else: - return None - except AttributeError: - loader = importer.find_module(packageName) - else: - try: - loader = importer.find_module(packageName) - except ImportError: - return None - - if loader is None: - return None - - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = types.ModuleType(packageName) - module.__path__ = [] - _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer, path_item, packageName, module) - if subpath is not None: - path = module.__path__ - path.append(subpath) - loader.load_module(packageName) - for path_item in path: - if path_item not in module.__path__: - module.__path__.append(path_item) - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - _imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' 
in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - if parent not in _namespace_packages: - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - _imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - _imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: - fixup_namespace_packages(subpath, package) - finally: - _imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) - -if importlib_machinery is not None: - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object, null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename, _cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, string_types): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -# whitespace and comment -LINE_END = re.compile(r"\s*(#.*)?$").match -# line continuation -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match -# Distribution or extra -DISTRO = re.compile(r"\s*((\w|[-.])+)").match -# ver. info -VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match -# comma between items -COMMA = re.compile(r"\s*,").match -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r""" - (?P[^-]+) ( - -(?P[^-]+) ( - -py(?P[^-]+) ( - -(?P.+) - )? - )? - )? 
- """, - re.VERBOSE | re.IGNORECASE, -).match - - -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, *args, **kwargs): - """ - Require packages for this EntryPoint, then resolve it. - """ - if not require or args or kwargs: - warnings.warn( - "Parameters to load are deprecated. Call .resolve and " - ".require separately.", - DeprecationWarning, - stacklevel=2, - ) - if require: - self.require(*args, **kwargs) - return self.resolve() - - def resolve(self): - """ - Resolve the entry point from its module and attrs. - """ - module = __import__(self.module_name, fromlist=['__name__'], level=0) - try: - return functools.reduce(getattr, self.attrs, module) - except AttributeError as exc: - raise ImportError(str(exc)) - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - reqs = self.dist.requires(self.extras) - items = working_set.resolve(reqs, env, installer) - list(map(working_set.add, items)) - - pattern = re.compile( - r'\s*' - r'(?P.+?)\s*' - r'=\s*' - r'(?P[\w.]+)\s*' - r'(:\s*(?P[\w.]+))?\s*' - r'(?P\[.*\])?\s*$' - ) - - @classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1, extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - m = cls.pattern.match(src) - if not m: - msg = "EntryPoint must be in 'name=module:attrs [extras]' format" - raise ValueError(msg, src) - res = m.groupdict() - extras = cls._parse_extras(res['extras']) - attrs = res['attr'].split('.') if res['attr'] else () - return cls(res['name'], res['module'], attrs, extras, dist) - - @classmethod - def _parse_extras(cls, extras_spec): - if not extras_spec: - return () - req = Requirement.parse('x' + extras_spec) - if req.specs: - raise ValueError() - return req.extras - - @classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - @classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data, dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = 
urlparse(location) - if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) - return location - - -class Distribution(object): - """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' - - def __init__(self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - @classmethod - def from_location(cls, location, basename, metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in _distributionImpl: - # .dist-info gets much metadata differently - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - cls = _distributionImpl[ext.lower()] - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - - @property - def hashcmp(self): - return ( - self.parsed_version, - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version or '', - self.platform or '', - ) - - def __hash__(self): - return hash(self.hashcmp) - - def __lt__(self, other): - return self.hashcmp < other.hashcmp - - def __le__(self, other): - return self.hashcmp <= other.hashcmp - - def __gt__(self, other): - return self.hashcmp > other.hashcmp - - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. (i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - @property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - - @property - def parsed_version(self): - if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) - - return self._parsed_version - - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return - - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return - - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. 
- """).strip().replace('\n', ' ') - - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) - - @property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata(self.PKG_INFO): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - tmpl = "Missing 'Version:' header and/or %s file" - raise ValueError(tmpl % self.PKG_INFO, self) - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra, reqs in split_sections(self._get_metadata(name)): - if extra: - if ':' in extra: - extra, marker = extra.split(':', 1) - if invalid_marker(marker): - # XXX warn - reqs=[] - elif not evaluate_marker(marker): - reqs=[] - extra = safe_extra(extra) or None - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - - def requires(self, extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None, ())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self, name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self, path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: - path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - for pkg in self._get_metadata('namespace_packages.txt'): - if pkg in sys.modules: - declare_namespace(pkg) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-' + self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self, self.location) - else: - return str(self) - - def __str__(self): - try: - version = getattr(self, 'version', None) - except ValueError: - version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name, version) - - def __getattr__(self, attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError(attr) - return getattr(self._provider, attr) - - @classmethod - def from_filename(cls, filename, metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - if isinstance(self.parsed_version, packaging.version.Version): - spec = "%s==%s" % (self.project_name, self.parsed_version) - else: - spec = "%s===%s" % (self.project_name, self.parsed_version) - - return Requirement.parse(spec) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group, name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group, name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = 
self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc = None): - """Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= [(p and _normalize_cached(p) or p) for p in path] - - for p, item in enumerate(npath): - if item == nloc: - break - elif item == bdir and self.precedence == EGG_DIST: - # if it's an .egg, give it precedence over its directory - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while True: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - # ha! - p = np - - return - - def check_version_conflict(self): - if self.key == 'setuptools': - # ignore the inevitable setuptools self-conflicts :( - return - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for " + repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - names = 'project_name version py_version platform location precedence' - for attr in names.split(): - kw.setdefault(attr, getattr(self, attr, None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - @property - def extras(self): - return [dep for dep in self._dep_map if dep] - - -class DistInfoDistribution(Distribution): - """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" - PKG_INFO = 'METADATA' - EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") - - @property - def _parsed_pkg_info(self): - """Parse and cache metadata""" - try: - return self._pkg_info - except AttributeError: - metadata = self.get_metadata(self.PKG_INFO) - self._pkg_info = email.parser.Parser().parsestr(metadata) - return self._pkg_info - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._compute_dependencies() - return self.__dep_map - - def _preparse_requirement(self, requires_dist): - """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') - Split environment marker, add == prefix to version specifiers as - necessary, and remove parenthesis. 
- """ - parts = requires_dist.split(';', 1) + [''] - distvers = parts[0].strip() - mark = parts[1].strip() - distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) - distvers = distvers.replace('(', '').replace(')', '') - return (distvers, mark) - - def _compute_dependencies(self): - """Recompute this distribution's dependencies.""" - from _markerlib import compile as compile_marker - dm = self.__dep_map = {None: []} - - reqs = [] - # Including any condition expressions - for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - distvers, mark = self._preparse_requirement(req) - parsed = next(parse_requirements(distvers)) - parsed.marker_fn = compile_marker(mark) - reqs.append(parsed) - - def reqs_for_extra(extra): - for req in reqs: - if req.marker_fn(override={'extra':extra}): - yield req - - common = frozenset(reqs_for_extra(None)) - dm[None].extend(common) - - for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - extra = safe_extra(extra.strip()) - dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) - - return dm - - -_distributionImpl = { - '.egg': Distribution, - '.egg-info': Distribution, - '.dist-info': DistInfoDistribution, - } - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - warnings.warn(stacklevel=level + 1, *args, **kw) - - -class RequirementParseError(ValueError): - def __str__(self): - return ' '.join(self.args) - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be a string, or a (possibly-nested) iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): - - items = [] - - while not TERMINATOR(line, p): - if CONTINUE(line, p): - try: - line = next(lines) - p = 0 - except StopIteration: - msg = "\\ must not appear on the last nonblank line" - raise RequirementParseError(msg) - - match = ITEM(line, p) - if not match: - msg = "Expected " + item_name + " in" - raise RequirementParseError(msg, line, "at", line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line, p) - if match: - # skip the comma - p = match.end() - elif not TERMINATOR(line, p): - msg = "Expected ',' or end-of-list in" - raise RequirementParseError(msg, line, "at", line[p:]) - - match = TERMINATOR(line, p) - # skip the terminator, if any - if match: - p = match.end() - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise RequirementParseError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line, p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), - "version spec") - specs = [(op, val) for op, val in specs] - yield Requirement(project_name, specs, extras) - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - self.specifier = packaging.specifiers.SpecifierSet( - 
",".join(["".join([x, y]) for x, y in specs]) - ) - self.specs = specs - self.extras = tuple(map(safe_extra, extras)) - self.hashCmp = ( - self.key, - self.specifier, - frozenset(self.extras), - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - extras = ','.join(self.extras) - if extras: - extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, self.specifier) - - def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) - - def __ne__(self, other): - return not self == other - - def __contains__(self, item): - if isinstance(item, Distribution): - if item.key != self.key: - return False - - item = item.version - - # Allow prereleases always in order to match the previous behavior of - # this method. In the future this should be smarter and follow PEP 440 - # more accurately. - return self.specifier.contains(item, prereleases=True) - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - @staticmethod - def parse(s): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs) == 1: - return reqs[0] - raise ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls, type): - class cls(cls, object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - - -def _bypass_ensure_directory(path): - """Sandbox-bypassing version of ensure_directory()""" - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(path) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, 0o755) - - -def split_sections(s): - """Split a string or iterable thereof into (section, content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - old_open = os.open - try: - # temporarily bypass sandboxing - os.open = os_open - return tempfile.mkstemp(*args,**kw) - finally: - # and then put it back - os.open = old_open - - -# Silence the PEP440Warning by default, so that end users don't get hit by it -# randomly just because they use pkg_resources. We want to append the rule -# because we want earlier uses of filterwarnings to take precedence over this -# one. 
-warnings.filterwarnings("ignore", category=PEP440Warning, append=True) - - -# from jaraco.functools 1.3 -def _call_aside(f, *args, **kwargs): - f(*args, **kwargs) - return f - - -@_call_aside -def _initialize(g=globals()): - "Set up global resource manager (deliberately not state-saved)" - manager = ResourceManager() - g['_manager'] = manager - for name in dir(manager): - if not name.startswith('_'): - g[name] = getattr(manager, name) - - -@_call_aside -def _initialize_master_working_set(): - """ - Prepare the master working set and make the ``require()`` - API available. - - This function has explicit effects on the global state - of pkg_resources. It is intended to be invoked once at - the initialization of this module. - - Invocation by other packages is unsupported and done - at their own risk. - """ - working_set = WorkingSet._build_master() - _declare_state('object', working_set=working_set) - - require = working_set.require - iter_entry_points = working_set.iter_entry_points - add_activation_listener = working_set.subscribe - run_script = working_set.run_script - # backward compatibility - run_main = run_script - # Activate all distributions already on sys.path, and ensure that - # all distributions added to the working set in the future (e.g. by - # calling ``require()``) will get activated as well. - add_activation_listener(lambda dist: dist.activate()) - working_set.entries=[] - # match order - list(map(working_set.add_entry, sys.path)) - globals().update(locals()) diff --git a/pymode/libs/pkg_resources/_vendor/__init__.py b/pymode/libs/pkg_resources/_vendor/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/pymode/libs/pkg_resources/_vendor/packaging/__about__.py b/pymode/libs/pkg_resources/_vendor/packaging/__about__.py deleted file mode 100644 index eadb794e..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/__about__.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "15.3" - -__author__ = "Donald Stufft" -__email__ = "donald@stufft.io" - -__license__ = "Apache License, Version 2.0" -__copyright__ = "Copyright 2014 %s" % __author__ diff --git a/pymode/libs/pkg_resources/_vendor/packaging/__init__.py b/pymode/libs/pkg_resources/_vendor/packaging/__init__.py deleted file mode 100644 index c39a8eab..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ -) - -__all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", -] diff --git a/pymode/libs/pkg_resources/_vendor/packaging/_compat.py b/pymode/libs/pkg_resources/_vendor/packaging/_compat.py deleted file mode 100644 index 5c396cea..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/_compat.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -import sys - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = str, -else: - string_types = basestring, - - -def with_metaclass(meta, *bases): - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/pymode/libs/pkg_resources/_vendor/packaging/_structures.py b/pymode/libs/pkg_resources/_vendor/packaging/_structures.py deleted file mode 100644 index 0ae9bb52..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/_structures.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import absolute_import, division, print_function - - -class Infinity(object): - - def __repr__(self): - return "Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return False - - def __le__(self, other): - return False - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return True - - def __ge__(self, other): - return True - - def __neg__(self): - return NegativeInfinity - -Infinity = Infinity() - - -class NegativeInfinity(object): - - def __repr__(self): - return "-Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return True - - def __le__(self, other): - return True - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return False - - def __ge__(self, other): - return False - - def __neg__(self): - return Infinity - -NegativeInfinity = NegativeInfinity() diff --git a/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py b/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py deleted file mode 100644 index 891664f0..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,784 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -import abc -import functools -import itertools -import re - -from ._compat import string_types, with_metaclass -from .version import Version, LegacyVersion, parse - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - - @abc.abstractmethod - def __str__(self): - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self): - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractmethod - def __ne__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. - """ - - @abc.abstractproperty - def prereleases(self): - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value): - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - """ - Determines if the given item is contained within this specifier. 
- """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} - - def __init__(self, spec="", prereleases=None): - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format( - self.__class__.__name__, - str(self), - pre, - ) - - def __str__(self): - return "{0}{1}".format(*self._spec) - - def __hash__(self): - return hash(self._spec) - - def __eq__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec == other._spec - - def __ne__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) - - def _coerce_version(self, version): - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - return self._spec[0] - - @property - def version(self): - return self._spec[1] - - @property - def prereleases(self): - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - return self._get_operator(self.operator)(item, self.version) - - def filter(self, iterable, prereleases=None): - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing - # else matches this specifier. 
- if (parsed_version.is_prerelease - and not (prereleases or self.prereleases)): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the begining. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex = re.compile( - r""" - ^ - \s* - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^\s]* # We just match everything, except for whitespace since this - # is a "legacy" specifier and the version string can be just - # about anything. - ) - \s* - $ - """, - re.VERBOSE | re.IGNORECASE, - ) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def _coerce_version(self, version): - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective, spec): - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective, spec): - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective, spec): - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective, spec): - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective, spec): - return prospective > self._coerce_version(spec) - - -def _require_version_compare(fn): - @functools.wraps(fn) - def wrapped(self, prospective, spec): - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex = re.compile( - r""" - ^ - \s* - (?P(~=|==|!=|<=|>=|<|>|===)) - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? 
# epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective, spec): - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. - prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") - and not x.startswith("dev")), - _version_split(spec), - ) - )[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return (self._get_operator(">=")(prospective, spec) - and self._get_operator("==")(prospective, prefix)) - - @_require_version_compare - def _compare_equal(self, prospective, spec): - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - prospective = prospective[:len(spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - spec, prospective = _pad_version(spec, prospective) - else: - # Convert our spec string into a Version - spec = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec.local: - prospective = Version(prospective.public) - - return prospective == spec - - @_require_version_compare - def _compare_not_equal(self, prospective, spec): - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. 
- if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is techincally greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. 
- if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - result = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split):]) - right_split.append(left[len(right_split):]) - - # Insert our padding - left_split.insert( - 1, - ["0"] * max(0, len(right_split[0]) - len(left_split[0])), - ) - right_split.insert( - 1, - ["0"] * max(0, len(left_split[0]) - len(right_split[0])), - ) - - return ( - list(itertools.chain(*left_split)), - list(itertools.chain(*right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - - def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "".format(str(self), pre) - - def __str__(self): - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - return hash(self._specs) - - def __and__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." 
- ) - - return specifier - - def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - - def __len__(self): - return len(self._specs) - - def __iter__(self): - return iter(self._specs) - - @property - def prereleases(self): - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all( - s.contains(item, prereleases=prereleases) - for s in self._specs - ) - - def filter(self, iterable, prereleases=None): - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. 
- else: - filtered = [] - found_prereleases = [] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/pymode/libs/pkg_resources/_vendor/packaging/version.py b/pymode/libs/pkg_resources/_vendor/packaging/version.py deleted file mode 100644 index 4ba574b9..00000000 --- a/pymode/libs/pkg_resources/_vendor/packaging/version.py +++ /dev/null @@ -1,403 +0,0 @@ -# Copyright 2014 Donald Stufft -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import re - -from ._structures import Infinity - - -__all__ = [ - "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" -] - - -_Version = collections.namedtuple( - "_Version", - ["epoch", "release", "dev", "pre", "post", "local"], -) - - -def parse(version): - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. - """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. 
- """ - - -class _BaseVersion(object): - - def __hash__(self): - return hash(self._key) - - def __lt__(self, other): - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): - if not isinstance(other, _BaseVersion): - return NotImplemented - - return method(self._key, other._key) - - -class LegacyVersion(_BaseVersion): - - def __init__(self, version): - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): - return self._version - - def __repr__(self): - return "".format(repr(str(self))) - - @property - def public(self): - return self._version - - @property - def base_version(self): - return self._version - - @property - def local(self): - return None - - @property - def is_prerelease(self): - return False - - @property - def is_postrelease(self): - return False - - -_legacy_version_component_re = re.compile( - r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, -) - -_legacy_version_replacement_map = { - "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", -} - - -def _parse_version_parts(s): - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? - (?: - (?:(?P[0-9]+)!)? # epoch - (?P[0-9]+(?:\.[0-9]+)*) # release segment - (?P
<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
    -"""
    -
    -
    -class Version(_BaseVersion):
    -
    -    _regex = re.compile(
    -        r"^\s*" + VERSION_PATTERN + r"\s*$",
    -        re.VERBOSE | re.IGNORECASE,
    -    )
    -
    -    def __init__(self, version):
    -        # Validate the version and parse it into pieces
    -        match = self._regex.search(version)
    -        if not match:
    -            raise InvalidVersion("Invalid version: '{0}'".format(version))
    -
    -        # Store the parsed out pieces of the version
    -        self._version = _Version(
    -            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
    -            release=tuple(int(i) for i in match.group("release").split(".")),
    -            pre=_parse_letter_version(
    -                match.group("pre_l"),
    -                match.group("pre_n"),
    -            ),
    -            post=_parse_letter_version(
    -                match.group("post_l"),
    -                match.group("post_n1") or match.group("post_n2"),
    -            ),
    -            dev=_parse_letter_version(
    -                match.group("dev_l"),
    -                match.group("dev_n"),
    -            ),
    -            local=_parse_local_version(match.group("local")),
    -        )
    -
    -        # Generate a key which will be used for sorting
    -        self._key = _cmpkey(
    -            self._version.epoch,
    -            self._version.release,
    -            self._version.pre,
    -            self._version.post,
    -            self._version.dev,
    -            self._version.local,
    -        )
    -
    -    def __repr__(self):
-        return "<Version({0})>".format(repr(str(self)))
    -
    -    def __str__(self):
    -        parts = []
    -
    -        # Epoch
    -        if self._version.epoch != 0:
    -            parts.append("{0}!".format(self._version.epoch))
    -
    -        # Release segment
    -        parts.append(".".join(str(x) for x in self._version.release))
    -
    -        # Pre-release
    -        if self._version.pre is not None:
    -            parts.append("".join(str(x) for x in self._version.pre))
    -
    -        # Post-release
    -        if self._version.post is not None:
    -            parts.append(".post{0}".format(self._version.post[1]))
    -
    -        # Development release
    -        if self._version.dev is not None:
    -            parts.append(".dev{0}".format(self._version.dev[1]))
    -
    -        # Local version segment
    -        if self._version.local is not None:
    -            parts.append(
    -                "+{0}".format(".".join(str(x) for x in self._version.local))
    -            )
    -
    -        return "".join(parts)
    -
    -    @property
    -    def public(self):
    -        return str(self).split("+", 1)[0]
    -
    -    @property
    -    def base_version(self):
    -        parts = []
    -
    -        # Epoch
    -        if self._version.epoch != 0:
    -            parts.append("{0}!".format(self._version.epoch))
    -
    -        # Release segment
    -        parts.append(".".join(str(x) for x in self._version.release))
    -
    -        return "".join(parts)
    -
    -    @property
    -    def local(self):
    -        version_string = str(self)
    -        if "+" in version_string:
    -            return version_string.split("+", 1)[1]
    -
    -    @property
    -    def is_prerelease(self):
    -        return bool(self._version.dev or self._version.pre)
    -
    -    @property
    -    def is_postrelease(self):
    -        return bool(self._version.post)
    -
    -
    -def _parse_letter_version(letter, number):
    -    if letter:
    -        # We consider there to be an implicit 0 in a pre-release if there is
    -        # not a numeral associated with it.
    -        if number is None:
    -            number = 0
    -
    -        # We normalize any letters to their lower case form
    -        letter = letter.lower()
    -
    -        # We consider some words to be alternate spellings of other words and
    -        # in those cases we want to normalize the spellings to our preferred
    -        # spelling.
    -        if letter == "alpha":
    -            letter = "a"
    -        elif letter == "beta":
    -            letter = "b"
    -        elif letter in ["c", "pre", "preview"]:
    -            letter = "rc"
    -        elif letter in ["rev", "r"]:
    -            letter = "post"
    -
    -        return letter, int(number)
    -    if not letter and number:
    -        # We assume if we are given a number, but we are not given a letter
    -        # then this is using the implicit post release syntax (e.g. 1.0-1)
    -        letter = "post"
    -
    -        return letter, int(number)
    -
    -
    -_local_version_seperators = re.compile(r"[\._-]")
    -
    -
    -def _parse_local_version(local):
    -    """
    -    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    -    """
    -    if local is not None:
    -        return tuple(
    -            part.lower() if not part.isdigit() else int(part)
    -            for part in _local_version_seperators.split(local)
    -        )
    -
    -
    -def _cmpkey(epoch, release, pre, post, dev, local):
    -    # When we compare a release version, we want to compare it with all of the
    -    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    -    # leading zeros until we come to something non zero, then take the rest
    -    # re-reverse it back into the correct order and make it a tuple and use
    -    # that for our sorting key.
    -    release = tuple(
    -        reversed(list(
    -            itertools.dropwhile(
    -                lambda x: x == 0,
    -                reversed(release),
    -            )
    -        ))
    -    )
    -
    -    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    -    # We'll do this by abusing the pre segment, but we _only_ want to do this
    -    # if there is not a pre or a post segment. If we have one of those then
    -    # the normal sorting rules will handle this case correctly.
    -    if pre is None and post is None and dev is not None:
    -        pre = -Infinity
    -    # Versions without a pre-release (except as noted above) should sort after
    -    # those with one.
    -    elif pre is None:
    -        pre = Infinity
    -
    -    # Versions without a post segment should sort before those with one.
    -    if post is None:
    -        post = -Infinity
    -
    -    # Versions without a development segment should sort after those with one.
    -    if dev is None:
    -        dev = Infinity
    -
    -    if local is None:
    -        # Versions without a local segment should sort before those with one.
    -        local = -Infinity
    -    else:
    -        # Versions with a local segment need that segment parsed to implement
    -        # the sorting rules in PEP440.
    -        # - Alpha numeric segments sort before numeric segments
    -        # - Alpha numeric segments sort lexicographically
    -        # - Numeric segments sort numerically
    -        # - Shorter versions sort before longer versions when the prefixes
    -        #   match exactly
    -        local = tuple(
    -            (i, "") if isinstance(i, int) else (-Infinity, i)
    -            for i in local
    -        )
    -
    -    return epoch, release, pre, post, dev, local
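The helpers deleted above appear to be a vendored copy of the reference PEP 440 ordering code: pre-release letters are normalized, trailing zeros in the release tuple are ignored for comparison, and dev/pre/post/local segments are forced before or after plain releases. A minimal sketch of the same ordering rules, assuming the separately maintained `packaging` distribution is installed instead of this bundled copy:

    from packaging.version import Version

    # "alpha" is treated as an alternate spelling of "a".
    assert Version("1.0alpha1") == Version("1.0a1")
    # "1.0-1" uses the implicit post-release syntax, i.e. "1.0.post1".
    assert Version("1.0-1") == Version("1.0.post1")
    # dev releases sort before pre-releases, which sort before the final release.
    assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
    # A local version segment sorts after the same version without one.
    assert Version("1.0") < Version("1.0+anything")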
    
    From 8e207376e3efa21b190a045f4d7f326dcacd5e3e Mon Sep 17 00:00:00 2001
    From: Diego Rabatone Oliveira 
    Date: Sat, 14 Dec 2019 13:13:05 -0300
Subject: [PATCH 08/81] Remove hardcoded _markerlib library
    
    ---
     pymode/libs/_markerlib/__init__.py |  16 ----
     pymode/libs/_markerlib/markers.py  | 119 -----------------------------
     2 files changed, 135 deletions(-)
     delete mode 100644 pymode/libs/_markerlib/__init__.py
     delete mode 100644 pymode/libs/_markerlib/markers.py
    
    diff --git a/pymode/libs/_markerlib/__init__.py b/pymode/libs/_markerlib/__init__.py
    deleted file mode 100644
    index e2b237b1..00000000
    --- a/pymode/libs/_markerlib/__init__.py
    +++ /dev/null
    @@ -1,16 +0,0 @@
    -try:
    -    import ast
    -    from _markerlib.markers import default_environment, compile, interpret
    -except ImportError:
    -    if 'ast' in globals():
    -        raise
    -    def default_environment():
    -        return {}
    -    def compile(marker):
    -        def marker_fn(environment=None, override=None):
    -            # 'empty markers are True' heuristic won't install extra deps.
    -            return not marker.strip()
    -        marker_fn.__doc__ = marker
    -        return marker_fn
    -    def interpret(marker, environment=None, override=None):
    -        return compile(marker)()
    diff --git a/pymode/libs/_markerlib/markers.py b/pymode/libs/_markerlib/markers.py
    deleted file mode 100644
    index fa837061..00000000
    --- a/pymode/libs/_markerlib/markers.py
    +++ /dev/null
    @@ -1,119 +0,0 @@
    -# -*- coding: utf-8 -*-
    -"""Interpret PEP 345 environment markers.
    -
    -EXPR [in|==|!=|not in] EXPR [or|and] ...
    -
    -where EXPR belongs to any of those:
    -
    -    python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    -    python_full_version = sys.version.split()[0]
    -    os.name = os.name
    -    sys.platform = sys.platform
    -    platform.version = platform.version()
    -    platform.machine = platform.machine()
    -    platform.python_implementation = platform.python_implementation()
    -    a free string, like '2.6', or 'win32'
    -"""
    -
    -__all__ = ['default_environment', 'compile', 'interpret']
    -
    -import ast
    -import os
    -import platform
    -import sys
    -import weakref
    -
    -_builtin_compile = compile
    -
    -try:
    -    from platform import python_implementation
    -except ImportError:
    -    if os.name == "java":
    -        # Jython 2.5 has ast module, but not platform.python_implementation() function.
    -        def python_implementation():
    -            return "Jython"
    -    else:
    -        raise
    -
    -
    -# restricted set of variables
    -_VARS = {'sys.platform': sys.platform,
    -         'python_version': '%s.%s' % sys.version_info[:2],
    -         # FIXME parsing sys.platform is not reliable, but there is no other
    -         # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
    -         'python_full_version': sys.version.split(' ', 1)[0],
    -         'os.name': os.name,
    -         'platform.version': platform.version(),
    -         'platform.machine': platform.machine(),
    -         'platform.python_implementation': python_implementation(),
    -         'extra': None # wheel extension
    -        }
    -
    -for var in list(_VARS.keys()):
    -    if '.' in var:
    -        _VARS[var.replace('.', '_')] = _VARS[var]
    -
    -def default_environment():
    -    """Return copy of default PEP 385 globals dictionary."""
    -    return dict(_VARS)
    -
    -class ASTWhitelist(ast.NodeTransformer):
    -    def __init__(self, statement):
    -        self.statement = statement # for error messages
    -
    -    ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
    -    # Bool operations
    -    ALLOWED += (ast.And, ast.Or)
    -    # Comparison operations
    -    ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
    -
    -    def visit(self, node):
    -        """Ensure statement only contains allowed nodes."""
    -        if not isinstance(node, self.ALLOWED):
    -            raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
    -                               (self.statement,
    -                               (' ' * node.col_offset) + '^'))
    -        return ast.NodeTransformer.visit(self, node)
    -
    -    def visit_Attribute(self, node):
    -        """Flatten one level of attribute access."""
    -        new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
    -        return ast.copy_location(new_node, node)
    -
    -def parse_marker(marker):
    -    tree = ast.parse(marker, mode='eval')
    -    new_tree = ASTWhitelist(marker).generic_visit(tree)
    -    return new_tree
    -
    -def compile_marker(parsed_marker):
    -    return _builtin_compile(parsed_marker, '', 'eval',
    -                   dont_inherit=True)
    -
    -_cache = weakref.WeakValueDictionary()
    -
    -def compile(marker):
    -    """Return compiled marker as a function accepting an environment dict."""
    -    try:
    -        return _cache[marker]
    -    except KeyError:
    -        pass
    -    if not marker.strip():
    -        def marker_fn(environment=None, override=None):
    -            """"""
    -            return True
    -    else:
    -        compiled_marker = compile_marker(parse_marker(marker))
    -        def marker_fn(environment=None, override=None):
    -            """override updates environment"""
    -            if override is None:
    -                override = {}
    -            if environment is None:
    -                environment = default_environment()
    -            environment.update(override)
    -            return eval(compiled_marker, environment)
    -    marker_fn.__doc__ = marker
    -    _cache[marker] = marker_fn
    -    return _cache[marker]
    -
    -def interpret(marker, environment=None):
    -    return compile(marker)(environment)
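The file removed above was a small whitelist-based interpreter for PEP 345 environment markers. For orientation only, a rough sketch of the same kind of evaluation, assuming the separately maintained `packaging` library is available; this is an illustration, not something python-mode ships:

    from packaging.markers import Marker

    marker = Marker('python_version >= "3.6" and os_name == "posix"')
    # Evaluate against the running interpreter's default environment.
    print(marker.evaluate())
    # Or evaluate with explicit overrides of the default environment.
    print(marker.evaluate({"os_name": "nt"}))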
    
    From 045888ecccb8fb8db18a293cf6f0160e10fb20f9 Mon Sep 17 00:00:00 2001
    From: Diego Rabatone Oliveira 
    Date: Sat, 14 Dec 2019 13:13:43 -0300
Subject: [PATCH 09/81] Remove hardcoded easy_install module
    
    ---
     pymode/libs/easy_install.py | 5 -----
     1 file changed, 5 deletions(-)
     delete mode 100644 pymode/libs/easy_install.py
    
    diff --git a/pymode/libs/easy_install.py b/pymode/libs/easy_install.py
    deleted file mode 100644
    index d87e9840..00000000
    --- a/pymode/libs/easy_install.py
    +++ /dev/null
    @@ -1,5 +0,0 @@
    -"""Run the EasyInstall command"""
    -
    -if __name__ == '__main__':
    -    from setuptools.command.easy_install import main
    -    main()
    
    From b8c9acffd1c000a1ded26830cbbc142341bf118c Mon Sep 17 00:00:00 2001
    From: Diego Rabatone Oliveira 
    Date: Tue, 20 Mar 2018 02:29:59 -0300
    Subject: [PATCH 10/81] Improve user_input's input/raw_input compatibility.
    
This PR adds the changes from #723 with the necessary rebase.
    
    Quoting the PR comment:
    
        I didn't like having to add a prompt message when using python's
        input and running it with PymodeRun
    
            * Allow default prompt ('' empty string)
            * Fix quoting (input('"') would crash because it wasn't escaped)
            * Strip whitespace to avoid duplicate spaces
    
    Caveats:
    strips all leading and trailing whitespace from prefix and msg; still
    doesn't write the prompt to stdout like input/raw_input does (to fix
    that, one would need a separate function for replacing
    input/raw_input).
    
Closes #723
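For orientation, a minimal standalone sketch of the prompt construction this patch introduces; the prefix value used in the comments below is hypothetical:

    def build_prompt(prefix, msg='', default=''):
        # Strip each piece so empty or padded values don't create double spaces.
        parts = [str(prefix).strip(), str(msg).strip()]
        if default != '':
            parts.append('[%s]' % default)
        parts.append('> ')
        # Drop empty pieces and join the rest with single spaces.
        return ' '.join(part for part in parts if part)

    # build_prompt('[Pymode]', 'Rename to', 'old_name') -> '[Pymode] Rename to [old_name] > '
    # build_prompt('[Pymode]')                          -> '[Pymode] > '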
    ---
     pymode/environment.py | 19 +++++++++++++------
     1 file changed, 13 insertions(+), 6 deletions(-)
    
    diff --git a/pymode/environment.py b/pymode/environment.py
    index 5ac4d512..30ae0e50 100644
    --- a/pymode/environment.py
    +++ b/pymode/environment.py
    @@ -84,24 +84,31 @@ def message(msg, history=False):
     
             return vim.command('call pymode#wide_message("%s")' % str(msg))
     
    -    def user_input(self, msg, default=''):
    +    def user_input(self, msg='', default=''):
             """Return user input or default.
     
             :return str:
     
             """
    -        msg = '%s %s ' % (self.prefix, msg)
    +        prompt = []
    +        prompt.append(str(self.prefix.strip()))
    +        prompt.append(str(msg).strip())
     
             if default != '':
    -            msg += '[%s] ' % default
    +            prompt.append('[%s]' % default)
    +
    +        prompt.append('> ')
    +        prompt = ' '.join([s for s in prompt if s])
    +
    +        vim.command('echohl Debug')
     
             try:
    -            vim.command('echohl Debug')
    -            input_str = vim.eval('input("%s> ")' % msg)
    -            vim.command('echohl none')
    +            input_str = vim.eval('input(%r)' % (prompt,))
             except KeyboardInterrupt:
                 input_str = ''
     
    +        vim.command('echohl none')
    +
             return input_str or default
     
         def user_confirm(self, msg, yes=False):
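As a usage note on the quoting change above: formatting with %r lets Python emit a quoted literal that Vim's input() accepts, whereas the old hand-written double quotes broke on prompts containing a double quote. A small illustration with a made-up prompt string:

    prompt = 'Rename "foo" to > '
    # Old style: the embedded double quotes terminate the Vim string early.
    print('input("%s")' % prompt)    # input("Rename "foo" to > ")  -> invalid expression
    # New style: repr() yields a single-quoted literal here, which Vim reads as-is.
    print('input(%r)' % (prompt,))   # input('Rename "foo" to > ')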
    
    From a844ee4c0dd6893380cb61748118a7f77ff8dda5 Mon Sep 17 00:00:00 2001
    From: Diego Rabatone Oliveira 
    Date: Sat, 14 Dec 2019 13:52:04 -0300
    Subject: [PATCH 11/81] Updating readme
    
    ---
     readme.md | 24 +++++++++++++++---------
     1 file changed, 15 insertions(+), 9 deletions(-)
    
    diff --git a/readme.md b/readme.md
    index 7d6749ec..5e7adb1a 100644
    --- a/readme.md
    +++ b/readme.md
    @@ -14,16 +14,22 @@
     -------------------------------------------------------------------------------
     
     

    - +

    -***Important***: From 2017-11-19 onwards python-mode uses submodules instead of -hard coding 3rd party libraries into its codebase. Please issue the command: -`git submodule update --init --recursive` -inside your python-mode folder. +***Important notes***: + + * From 2017-11-19 onwards python-mode uses submodules instead of + hard coding 3rd party libraries into its codebase. Please issue the command: + `git submodule update --init --recursive` inside your python-mode folder. + + * From 2019-12-14 onwards `python-mode` **dropped python2 suuport**. If you + still need to use it with python2 you should look for the `last-py2-support` + branch and/or tag. If you are a new user please clone the repos using the recursive flag: -`git clone --recurse-submodules https://github.com/python-mode/python-mode` + +> git clone --recurse-submodules https://github.com/python-mode/python-mode ------------------------------------------------------------------------------- @@ -50,7 +56,7 @@ Why Python-mode? The plugin contains all you need to develop python applications in Vim. -* Support Python version 2.6+ and 3.2+ +* Support Python and 3.6+ * Syntax highlighting * Virtualenv support * Run python code (`r`) @@ -75,7 +81,7 @@ Another old presentation here: . # Requirements -Vim >= 7.3 (most features needed +python or +python3 support) (also +Vim >= 7.3 (most features needed +python3 support) (also `--with-features=big` if you want `g:pymode_lint_signs`). # How to install @@ -152,7 +158,7 @@ Nevertheless just a refresher on how to submit bugs: Clear all python cache/compiled files (`*.pyc` files and `__pycache__` directory and everything under it). In Linux/Unix/MacOS you can run: -`find . -type f -name '*.pyc' -delete && find . -type d -name '__pycache__' -delete` +`find . -type f -iname '*.pyc' -o -iname '*.pyo' -delete && find . -type d -name '__pycache__' -delete` Then start python mode with: From 4671530eba489034897280ca1ad521a8173d9e7d Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 14:25:22 -0300 Subject: [PATCH 12/81] Create FUNDING.yml --- .github/FUNDING.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000..8847bdf2 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +liberapay: diraol From e8a20432725211ee9ddded33b10ea46a333810e3 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 14:35:46 -0300 Subject: [PATCH 13/81] Updating travis config --- .travis.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index b8b86982..057e0745 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,19 +1,18 @@ # Build vim from source with python3 support and execute tests. 
-dist: trusty +dist: bionic sudo: required branches: only: - develop - - dev_unstable before_install: - export ORIGINAL_FOLDER=$PWD - sudo apt update - - sudo apt install -yqq libncurses5-dev libgnome2-dev libgnomeui-dev libgtk2.0-dev libatk1.0-dev libbonoboui2-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git + - sudo apt install -yqq libncurses5-dev libatk1.0-dev libbonoboui2-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git - sudo apt remove --purge vim vim-runtime gvim - cd /tmp - git clone https://github.com/vim/vim.git - cd vim - - ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.5/config --enable-perlinterp=yes --enable-luainterp=yes --enable-gui=gtk2 --enable-cscope --prefix=/usr/local + - ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.6/config --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local - sudo make && sudo make install - cd $ORIGINAL_FOLDER install: git clone --recurse-submodules https://github.com/python-mode/python-mode From bac858ff061e8d9f852917f922fc5988d1d7ec8b Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 14:57:05 -0300 Subject: [PATCH 14/81] [workflow] Testing github workflow --- .github/workflows/test_pymode.yml | 36 +++++++++++++ .travis.yml | 19 ------- tests/utils/pymoderc | 90 +++++++++++++++++++++++++++++++ tests/utils/vimrc | 22 ++++++++ 4 files changed, 148 insertions(+), 19 deletions(-) create mode 100644 .github/workflows/test_pymode.yml delete mode 100644 .travis.yml create mode 100644 tests/utils/pymoderc create mode 100644 tests/utils/vimrc diff --git a/.github/workflows/test_pymode.yml b/.github/workflows/test_pymode.yml new file mode 100644 index 00000000..7b6bab70 --- /dev/null +++ b/.github/workflows/test_pymode.yml @@ -0,0 +1,36 @@ +name: Testing python-mode + +on: [push] + +jobs: + test-python-3_6: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Install dependencies + run: | + sudo apt update + sudo apt install -yqq libncurses5-dev libatk1.0-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git + sudo apt remove --purge vim vim-runtime gvim + - name: build and install vim from source + working-directory: /tmp + run: | + git clone https://github.com/vim/vim.git + cd vim + ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.6/config-3.6m-x86_64-linux-gnu --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local + sudo make && sudo make install + - name: Install python-mode + run: | + export PYMODE_DIR="${HOME}/work/python-mode/python-mode" + mkdir -p ${HOME}/.vim/pack/foo/start/ + ln -s ${PYMODE_DIR} ${HOME}/.vim/pack/foo/start/python-mode + cp ${PYMODE_DIR}/tests/utils/pymoderc ${HOME}/.pymoderc + cp ${PYMODE_DIR}/tests/utils/vimrc ${HOME}/.vimrc + touch ${HOME}/.vimrc.before ${HOME}/.vimrc.after + - name: Run python-mode test script + run: | + alias python=python3 + cd ${HOME}/work/python-mode/python-mode + git submodule update --init --recursive + git submodule sync + bash tests/test.sh diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 057e0745..00000000 --- a/.travis.yml +++ /dev/null @@ -1,19 +0,0 @@ -# Build vim from source with python3 support and execute tests. 
-dist: bionic -sudo: required -branches: - only: - - develop -before_install: - - export ORIGINAL_FOLDER=$PWD - - sudo apt update - - sudo apt install -yqq libncurses5-dev libatk1.0-dev libbonoboui2-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git - - sudo apt remove --purge vim vim-runtime gvim - - cd /tmp - - git clone https://github.com/vim/vim.git - - cd vim - - ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.6/config --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local - - sudo make && sudo make install - - cd $ORIGINAL_FOLDER -install: git clone --recurse-submodules https://github.com/python-mode/python-mode -script: vim --version && cd ./tests && bash -x ./test.sh diff --git a/tests/utils/pymoderc b/tests/utils/pymoderc new file mode 100644 index 00000000..222c6ceb --- /dev/null +++ b/tests/utils/pymoderc @@ -0,0 +1,90 @@ +" These are all pymode configs. You can read about them using :help pymode +let g:pymode = 1 +let g:pymode_warnings = 1 +let g:pymode_paths = [] +let g:pymode_trim_whitespaces = 1 +let g:pymode_options = 1 +let g:pymode_options_max_line_length = 79 +let g:pymode_options_colorcolumn = 1 +let g:pymode_quickfix_minheight = 3 +let g:pymode_quickfix_maxheight = 6 +let g:pymode_indent = 1 +let g:pymode_folding = 0 +let g:pymode_motion = 1 +let g:pymode_doc = 1 +let g:pymode_doc_bind = 'K' +let g:pymode_virtualenv = 1 +let g:pymode_virtualenv_path = $VIRTUAL_ENV +let g:pymode_run = 1 +let g:pymode_run_bind = 'r' +let g:pymode_breakpoint = 1 +let g:pymode_breakpoint_bind = 'b' +let g:pymode_breakpoint_cmd = '' +let g:pymode_lint = 1 +let g:pymode_lint_on_write = 1 +let g:pymode_lint_unmodified = 0 +let g:pymode_lint_on_fly = 0 +let g:pymode_lint_message = 1 +let g:pymode_lint_checkers = ['pyflakes', 'pep8', 'mccabe'] +let g:pymode_lint_ignore = ["E501", "W",] +let g:pymode_lint_select = ["E501", "W0011", "W430"] +let g:pymode_lint_sort = [] +let g:pymode_lint_cwindow = 1 +let g:pymode_lint_signs = 1 +let g:pymode_lint_todo_symbol = 'WW' +let g:pymode_lint_comment_symbol = 'CC' +let g:pymode_lint_visual_symbol = 'RR' +let g:pymode_lint_error_symbol = 'EE' +let g:pymode_lint_info_symbol = 'II' +let g:pymode_lint_pyflakes_symbol = 'FF' +let g:pymode_lint_options_pep8 = + \ {'max_line_length': g:pymode_options_max_line_length} +let g:pymode_lint_options_pyflakes = { 'builtins': '_' } +let g:pymode_lint_options_mccabe = { 'complexity': 12 } +let g:pymode_lint_options_pep257 = {} +let g:pymode_lint_options_pylint = + \ {'max-line-length': g:pymode_options_max_line_length} +let g:pymode_rope = 1 +let g:pymode_rope_lookup_project = 0 +let g:pymode_rope_project_root = "" +let g:pymode_rope_ropefolder='.ropeproject' +let g:pymode_rope_show_doc_bind = 'd' +let g:pymode_rope_regenerate_on_write = 1 +let g:pymode_rope_completion = 1 +let g:pymode_rope_complete_on_dot = 1 +let g:pymode_rope_completion_bind = '' +let g:pymode_rope_autoimport = 0 +let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime'] +let g:pymode_rope_autoimport_import_after_complete = 0 +let g:pymode_rope_goto_definition_bind = 'g' +let g:pymode_rope_goto_definition_cmd = 'new' +let g:pymode_rope_rename_bind = 'rr' +let g:pymode_rope_rename_module_bind = 'r1r' +let g:pymode_rope_organize_imports_bind = 'ro' +let g:pymode_rope_autoimport_bind = 'ra' +let g:pymode_rope_module_to_package_bind = 'r1p' +let g:pymode_rope_extract_method_bind = 'rm' +let g:pymode_rope_extract_variable_bind = 'rl' 
+let g:pymode_rope_use_function_bind = 'ru' +let g:pymode_rope_move_bind = 'rv' +let g:pymode_rope_change_signature_bind = 'rs' +let g:pymode_syntax = 1 +let g:pymode_syntax_slow_sync = 1 +let g:pymode_syntax_all = 1 +let g:pymode_syntax_print_as_function = 0 +let g:pymode_syntax_highlight_async_await = g:pymode_syntax_all +let g:pymode_syntax_highlight_equal_operator = g:pymode_syntax_all +let g:pymode_syntax_highlight_stars_operator = g:pymode_syntax_all +let g:pymode_syntax_highlight_self = g:pymode_syntax_all +let g:pymode_syntax_indent_errors = g:pymode_syntax_all +let g:pymode_syntax_space_errors = g:pymode_syntax_all +let g:pymode_syntax_string_formatting = g:pymode_syntax_all +let g:pymode_syntax_string_format = g:pymode_syntax_all +let g:pymode_syntax_string_templates = g:pymode_syntax_all +let g:pymode_syntax_doctests = g:pymode_syntax_all +let g:pymode_syntax_builtin_objs = g:pymode_syntax_all +let g:pymode_syntax_builtin_types = g:pymode_syntax_all +let g:pymode_syntax_highlight_exceptions = g:pymode_syntax_all +let g:pymode_syntax_docstrings = g:pymode_syntax_all + +" vim:tw=79:ts=8:ft=help:norl: diff --git a/tests/utils/vimrc b/tests/utils/vimrc new file mode 100644 index 00000000..6920a0bb --- /dev/null +++ b/tests/utils/vimrc @@ -0,0 +1,22 @@ +source /root/.vimrc.before +source /root/.pymoderc + +syntax on +filetype plugin indent on +set shortmess=at +set cmdheight=10 +set ft=python +set shell=bash +set rtp+=/root/.vim/pack/foo/start/python-mode +set term=xterm-256color +set wrap " visually wrap lines +set smartindent " smart indenting +set shiftwidth=4 " default to two spaces +set tabstop=4 " default to two spaces +set softtabstop=4 " default to two spaces +set shiftround " always round indentation to shiftwidth +set mouse= " disable mouse +set expandtab +set backspace=indent,eol,start + +source /root/.vimrc.after From 7678045d6d5f9fe16ca52d3bc327072ad3a80d58 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:27:42 -0300 Subject: [PATCH 15/81] [submodules] Updating astroid to 2.3.3 --- submodules/astroid | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/astroid b/submodules/astroid index 5b5cd7ac..ace7b296 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit 5b5cd7acbecaa9b587b07de27a3334a2ec4f2a79 +Subproject commit ace7b2967ea762ec43fc7be8ab9c8007564d9be2 From a0b38b3b4cd785d10e99646dba009e9e8fbae5c1 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:28:38 -0300 Subject: [PATCH 16/81] [submodules] Updating autopep8 to 1.4.4 --- submodules/autopep8 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/autopep8 b/submodules/autopep8 index 159bb888..fda3bb39 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit 159bb88843e298534e46914da242e680a1c8c47d +Subproject commit fda3bb39181437b6b8a0aa0185f21ae5f14385dd From 7ee1037b0d5755bdac88f9789169d0cc4b87b465 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:30:23 -0300 Subject: [PATCH 17/81] [submodules] Updating pycodestyle to 2.5.0 --- submodules/pycodestyle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pycodestyle b/submodules/pycodestyle index 566cdc0c..e91ef6e4 160000 --- a/submodules/pycodestyle +++ b/submodules/pycodestyle @@ -1 +1 @@ -Subproject commit 566cdc0cb22e5530902e456d0b315403ebab980c +Subproject commit e91ef6e40f2be30f9af7a86a84255d6bdfe23f51 From 
0dd80d28d100ada1d159dba8f4f8eed543fd76a2 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:31:27 -0300 Subject: [PATCH 18/81] [submodules] Updating pydocstyle to 5.0.1 --- submodules/pydocstyle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pydocstyle b/submodules/pydocstyle index eea4ca17..05e62056 160000 --- a/submodules/pydocstyle +++ b/submodules/pydocstyle @@ -1 +1 @@ -Subproject commit eea4ca179553189a7b8a62d6085f15b50bb98e35 +Subproject commit 05e6205630569452fa13799783641153ea6cb7c6 From ca267ce66a6d02d7f7387f56fb0f326d7213a33e Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:31:54 -0300 Subject: [PATCH 19/81] [submodules] Update pyflakes to 2.1.1 --- submodules/pyflakes | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pyflakes b/submodules/pyflakes index 45fc7324..6501af45 160000 --- a/submodules/pyflakes +++ b/submodules/pyflakes @@ -1 +1 @@ -Subproject commit 45fc732466056fe35c85936ff25491df7905c597 +Subproject commit 6501af45203dfa3e2d422cfb3ebbecff853db47f From 527f62d6592bd8714eb2bb4c44c3c106acf0bbe7 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:32:23 -0300 Subject: [PATCH 20/81] [submodules] Update pylama to 7.7.1 --- submodules/pylama | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pylama b/submodules/pylama index 837ecd3d..f436ccc6 160000 --- a/submodules/pylama +++ b/submodules/pylama @@ -1 +1 @@ -Subproject commit 837ecd3d7a8597ab5f28bc83072de68e16470f1e +Subproject commit f436ccc6b55b33381a295ded753e467953cf4379 From 8b5a9a9a23afc1cdacf9e0b8703da338bd973ba6 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:33:04 -0300 Subject: [PATCH 21/81] [submodules] Update pylint to 2.4.4 --- submodules/pylint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pylint b/submodules/pylint index 66cb3218..d0a597b3 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit 66cb32187c040f82dd067bc0d226b2f105bf6c38 +Subproject commit d0a597b34a0e39a7dd64cdf685f3147f147f52a4 From 7dc16e8cea03ea01fc2d59a70e0334ee4c041241 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 14 Dec 2019 17:33:51 -0300 Subject: [PATCH 22/81] [submodules] Update rope to 0.14.0 --- submodules/rope | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/rope b/submodules/rope index 95aa2749..d8b70ae7 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit 95aa2749f978d579fda03478dece4d611c2323f9 +Subproject commit d8b70ae76f160403b51b2291a112c11505298a24 From 5eb8c20de41eb5cfb5d039da0f3104d3b928d285 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Wed, 18 Dec 2019 16:57:13 +1100 Subject: [PATCH 23/81] Fix simple typo: systematicaly -> systematically Closes #1060 --- tests/test_python_sample_code/algorithms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_python_sample_code/algorithms.py b/tests/test_python_sample_code/algorithms.py index 80211f4e..cdd2e81f 100644 --- a/tests/test_python_sample_code/algorithms.py +++ b/tests/test_python_sample_code/algorithms.py @@ -60,7 +60,7 @@ def varAnd(population, toolbox, cxpb, mutpb): This variation is named *And* beceause of its propention to apply both crossover and mutation on the individuals. 
Note that both operators are - not applied systematicaly, the resulting individuals can be generated from + not applied systematically, the resulting individuals can be generated from crossover only, mutation only, crossover and mutation, and reproduction according to the given probabilities. Both probabilities should be in :math:`[0, 1]`. From 1e97e4d8fa427f7b3262b2daadff8d32ca5be536 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sun, 26 Jan 2020 09:48:52 -0300 Subject: [PATCH 24/81] Updating submodules --- submodules/autopep8 | 2 +- submodules/mccabe | 2 +- submodules/pydocstyle | 2 +- submodules/rope | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/submodules/autopep8 b/submodules/autopep8 index fda3bb39..107e29dc 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit fda3bb39181437b6b8a0aa0185f21ae5f14385dd +Subproject commit 107e29dce22c7b367a36633a78735278e4ad4288 diff --git a/submodules/mccabe b/submodules/mccabe index c2f5b386..f318ade8 160000 --- a/submodules/mccabe +++ b/submodules/mccabe @@ -1 +1 @@ -Subproject commit c2f5b386458cfda0aa4239f4d11b4e5e75027bda +Subproject commit f318ade8d139a3412c29bf992f447f1f1f8b3d83 diff --git a/submodules/pydocstyle b/submodules/pydocstyle index 05e62056..59396eb5 160000 --- a/submodules/pydocstyle +++ b/submodules/pydocstyle @@ -1 +1 @@ -Subproject commit 05e6205630569452fa13799783641153ea6cb7c6 +Subproject commit 59396eb50d1d1a59fdccdd71cf4031577c02ab54 diff --git a/submodules/rope b/submodules/rope index d8b70ae7..a1e77083 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit d8b70ae76f160403b51b2291a112c11505298a24 +Subproject commit a1e77083a47370ddc9dcd7707c76ddb12c47a323 From 30a73d861aba2e1a5e0693d3a3df30293182efe2 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 15 Feb 2020 16:56:42 -0300 Subject: [PATCH 25/81] Updating readme with debug/faq issues --- readme.md | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/readme.md b/readme.md index 5e7adb1a..38f49c7e 100644 --- a/readme.md +++ b/readme.md @@ -156,9 +156,11 @@ Nevertheless just a refresher on how to submit bugs: **(From the FAQ)** Clear all python cache/compiled files (`*.pyc` files and `__pycache__` -directory and everything under it). In Linux/Unix/MacOS you can run: +directory and everything under it) from your _python-mode_ install directory. -`find . -type f -iname '*.pyc' -o -iname '*.pyo' -delete && find . -type d -name '__pycache__' -delete` +In Linux/Unix/MacOS you can run: + +`find -type f -iname '*.pyc' -o -iname '*.pyo' -delete && find . -type d -name '__pycache__' -delete` Then start python mode with: @@ -182,14 +184,28 @@ Please, also provide more contextual information such as: * the python version that vim has loaded in your tests: * `:PymodePython import sys; print(sys.version_info)` output. * and if you are using virtualenvs and/or conda, also state that, please. +* It would be good also to provide the output of the two following commands: +* `git status` (under your _python-mode_ directory) +* `tree ` or something similar (such as `ls -lR`) # Frequent problems Read this section before opening an issue on the tracker. +## Python 2/3 vim support + +Vim [has issues](https://github.com/vim/vim/issues/3585) to work with both +python2 and python3 at the same time, so if your VIM is compiled with support +to both version you may find problems. 
The best way to handle it is to build +your vim again with only python3 support. +[Here](https://github.com/ycm-core/YouCompleteMe/wiki/Building-Vim-from-source) +is a good reference on how to build vim from source. + ## Python 3 syntax -By default python-mode uses python 3 syntax checking. +`python-mode` supports only python3, so, if you are using python2 we cannot +help you that much. Look for our branch with python2-support (old version, +not maintained anymore) (`last-py2-support`). ## Symlinks on Windows @@ -208,8 +224,8 @@ Then we probably changed some repo reference or some of our dependencies had a `git push --force` in its git history. So the best way for you to handle it is to run, inside the `python-mode` directory: -`git submodule update --recursive --init --force` -`git submodule sync --recursive` +* `git submodule update --recursive --init --force` +* `git submodule sync --recursive` # Documentation From 0c4f490d6cb4aec28d5f679916760bf414bbab56 Mon Sep 17 00:00:00 2001 From: GYoung Date: Wed, 26 Feb 2020 13:45:40 +0800 Subject: [PATCH 26/81] Update readme.md Correct word spelling --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 38f49c7e..47660e45 100644 --- a/readme.md +++ b/readme.md @@ -23,7 +23,7 @@ hard coding 3rd party libraries into its codebase. Please issue the command: `git submodule update --init --recursive` inside your python-mode folder. - * From 2019-12-14 onwards `python-mode` **dropped python2 suuport**. If you + * From 2019-12-14 onwards `python-mode` **dropped python2 support**. If you still need to use it with python2 you should look for the `last-py2-support` branch and/or tag. From 4ea05b638d65276cd08ae7b25532b9e516b5a2d5 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 16 Apr 2020 13:18:58 -0300 Subject: [PATCH 27/81] Don't dowload whole history of submodules --- .gitmodules | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.gitmodules b/.gitmodules index ada9193a..15a5dd75 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,36 +2,46 @@ path = submodules/autopep8 url = https://github.com/hhatto/autopep8 ignore = dirty + shallow = true [submodule "submodules/pycodestyle"] path = submodules/pycodestyle url = https://github.com/PyCQA/pycodestyle ignore = dirty + shallow = true [submodule "submodules/pydocstyle"] path = submodules/pydocstyle url = https://github.com/PyCQA/pydocstyle ignore = dirty + shallow = true [submodule "submodules/mccabe"] path = submodules/mccabe url = https://github.com/PyCQA/mccabe ignore = dirty + shallow = true [submodule "submodules/pyflakes"] path = submodules/pyflakes url = https://github.com/PyCQA/pyflakes ignore = dirty + shallow = true [submodule "submodules/snowball_py"] path = submodules/snowball_py url = https://github.com/diraol/snowball_py ignore = dirty branch = develop + shallow = true [submodule "submodules/pylint"] path = submodules/pylint url = https://github.com/PyCQA/pylint + shallow = true [submodule "submodules/rope"] path = submodules/rope url = https://github.com/python-rope/rope + shallow = true [submodule "submodules/astroid"] path = submodules/astroid url = https://github.com/PyCQA/astroid + shallow = true [submodule "submodules/pylama"] path = submodules/pylama url = https://github.com/klen/pylama + shallow = true From e117b43080151da6cf51852d57bdabea307834aa Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 16 Apr 2020 13:18:58 -0300 Subject: [PATCH 28/81] Don't dowload whole history of submodules --- 
.gitmodules | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.gitmodules b/.gitmodules index ada9193a..15a5dd75 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,36 +2,46 @@ path = submodules/autopep8 url = https://github.com/hhatto/autopep8 ignore = dirty + shallow = true [submodule "submodules/pycodestyle"] path = submodules/pycodestyle url = https://github.com/PyCQA/pycodestyle ignore = dirty + shallow = true [submodule "submodules/pydocstyle"] path = submodules/pydocstyle url = https://github.com/PyCQA/pydocstyle ignore = dirty + shallow = true [submodule "submodules/mccabe"] path = submodules/mccabe url = https://github.com/PyCQA/mccabe ignore = dirty + shallow = true [submodule "submodules/pyflakes"] path = submodules/pyflakes url = https://github.com/PyCQA/pyflakes ignore = dirty + shallow = true [submodule "submodules/snowball_py"] path = submodules/snowball_py url = https://github.com/diraol/snowball_py ignore = dirty branch = develop + shallow = true [submodule "submodules/pylint"] path = submodules/pylint url = https://github.com/PyCQA/pylint + shallow = true [submodule "submodules/rope"] path = submodules/rope url = https://github.com/python-rope/rope + shallow = true [submodule "submodules/astroid"] path = submodules/astroid url = https://github.com/PyCQA/astroid + shallow = true [submodule "submodules/pylama"] path = submodules/pylama url = https://github.com/klen/pylama + shallow = true From 0fa798dd9ac2cfbcea707254db41489f1194ce9a Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Tue, 21 Apr 2020 11:08:59 -0300 Subject: [PATCH 29/81] Updating Authors --- AUTHORS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/AUTHORS b/AUTHORS index fd56f319..926c2737 100644 --- a/AUTHORS +++ b/AUTHORS @@ -4,7 +4,7 @@ Author: Maintainers: -* Felipe M. Vieira (https://github.com/fmv1992) +* Diego Rabatone Oliveira (https://github.com/diraol); Contributors: @@ -25,9 +25,9 @@ Contributors: * Daniel Hahler (http://github.com/blueyed); * David Vogt (http://github.com/winged); * Denis Kasak (http://github.com/dkasak); -* Diego Rabatone Oliveira (https://github.com/diraol); * Dimitrios Semitsoglou-Tsiapos (https://github.com/dset0x); * Dirk Wallenstein (http://github.com/dirkwallenstein); +* Felipe M. Vieira (https://github.com/fmv1992) * Filip Poboril (https://github.com/fpob); * Florent Xicluna (http://github.com/florentx); * Fredrik Henrysson (http://github.com/fhenrysson); From 1ae3a3f36fcca2cd4f047b3b284ea88615f410a5 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sat, 29 Jun 2019 20:22:29 +1000 Subject: [PATCH 30/81] Change motion commands to be line-based rather than character-based --- autoload/pymode/motion.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoload/pymode/motion.vim b/autoload/pymode/motion.vim index a930f35a..c88fb913 100644 --- a/autoload/pymode/motion.vim +++ b/autoload/pymode/motion.vim @@ -51,7 +51,7 @@ fun! pymode#motion#select(first_pattern, second_pattern, inner) "{{{ endif call cursor(snum, 1) - normal! v + normal! V call cursor(enum, len(getline(enum))) endif endfunction "}}} From 1603c1259a9b8d8eda486b5e1b943163ad9fe7c4 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sat, 29 Jun 2019 21:30:43 +1000 Subject: [PATCH 31/81] Add --clean flag to python-mode tests For some reason, vim will still add packages even when -u is specified. This flag tells vim to avoid loading those packages. This should improve test isolation. 
--- tests/test_bash/test_autocommands.sh | 4 ++-- tests/test_bash/test_autopep8.sh | 2 +- tests/test_bash/test_folding.sh | 8 ++++---- tests/test_bash/test_pymodelint.sh | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_bash/test_autocommands.sh b/tests/test_bash/test_autocommands.sh index 13708c30..bc46b9d5 100644 --- a/tests/test_bash/test_autocommands.sh +++ b/tests/test_bash/test_autocommands.sh @@ -20,10 +20,10 @@ set +e for ONE_PYMODE_COMMANDS_TEST in "${TEST_PYMODE_COMMANDS_ARRAY[@]}" do echo "Starting test: $0:$ONE_PYMODE_COMMANDS_TEST" >> $VIM_OUTPUT_FILE - RETURN_CODE=$(vim -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) + RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) ### Enable the following to execute one test at a time. - ### FOR PINPOINT TESTING ### vim -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE + ### FOR PINPOINT TESTING ### vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE ### FOR PINPOINT TESTING ### exit 1 RETURN_CODE=$? diff --git a/tests/test_bash/test_autopep8.sh b/tests/test_bash/test_autopep8.sh index eae173f3..05585725 100644 --- a/tests/test_bash/test_autopep8.sh +++ b/tests/test_bash/test_autopep8.sh @@ -2,7 +2,7 @@ # Source file. set +e -RETURN_CODE=$(vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/autopep8.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) +RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/autopep8.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) RETURN_CODE=$? set -e exit $RETURN_CODE diff --git a/tests/test_bash/test_folding.sh b/tests/test_bash/test_folding.sh index 2902ef8d..d0ac884a 100644 --- a/tests/test_bash/test_folding.sh +++ b/tests/test_bash/test_folding.sh @@ -5,17 +5,17 @@ # Source file. set +e source ./test_helpers_bash/test_prepare_between_tests.sh -vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding1.vim" $VIM_DISPOSABLE_PYFILE > /dev/null +vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding1.vim" $VIM_DISPOSABLE_PYFILE > /dev/null R1=$? source ./test_helpers_bash/test_prepare_between_tests.sh -vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding2.vim" $VIM_DISPOSABLE_PYFILE > /dev/null +vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding2.vim" $VIM_DISPOSABLE_PYFILE > /dev/null R2=$? source ./test_helpers_bash/test_prepare_between_tests.sh # TODO: enable folding3.vim script back. -# vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding3.vim" $VIM_DISPOSABLE_PYFILE > /dev/null +# vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding3.vim" $VIM_DISPOSABLE_PYFILE > /dev/null # R3=$? source ./test_helpers_bash/test_prepare_between_tests.sh -vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding4.vim" $VIM_DISPOSABLE_PYFILE > /dev/null +vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding4.vim" $VIM_DISPOSABLE_PYFILE > /dev/null R4=$? set -e diff --git a/tests/test_bash/test_pymodelint.sh b/tests/test_bash/test_pymodelint.sh index cf8d626d..583d0774 100644 --- a/tests/test_bash/test_pymodelint.sh +++ b/tests/test_bash/test_pymodelint.sh @@ -5,8 +5,8 @@ # Source file. 
set +e -vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodelint.vim" $VIM_DISPOSABLE_PYFILE -# RETURN_CODE=$(vim -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodeversion.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) +vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodelint.vim" $VIM_DISPOSABLE_PYFILE +# RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodeversion.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) # RETURN_CODE=$? set -e # exit $RETURN_CODE From c57eb363d6bca80fa5790a2dacbb6f8b33931fda Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sun, 26 Apr 2020 09:17:41 +1000 Subject: [PATCH 32/81] Add test for textobjects selection --- tests/test.sh | 1 + tests/test_bash/test_textobject.sh | 15 ++++++++++ .../test_procedures_vimscript/textobject.vim | 29 +++++++++++++++++++ 3 files changed, 45 insertions(+) create mode 100644 tests/test_bash/test_textobject.sh create mode 100644 tests/test_procedures_vimscript/textobject.vim diff --git a/tests/test.sh b/tests/test.sh index b7747308..fe9fcae1 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -20,6 +20,7 @@ declare -a TEST_ARRAY=( "./test_bash/test_autopep8.sh" "./test_bash/test_autocommands.sh" "./test_bash/test_folding.sh" + "./test_bash/test_textobject.sh" ) ## now loop through the above array set +e diff --git a/tests/test_bash/test_textobject.sh b/tests/test_bash/test_textobject.sh new file mode 100644 index 00000000..43a799f9 --- /dev/null +++ b/tests/test_bash/test_textobject.sh @@ -0,0 +1,15 @@ +#! /bin/bash + +# Source file. +set +e +source ./test_helpers_bash/test_prepare_between_tests.sh +vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/textobject.vim" $VIM_DISPOSABLE_PYFILE > /dev/null +R1=$? +set -e + +if [[ "$R1" -ne 0 ]] +then + exit 1 +fi + +# vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_procedures_vimscript/textobject.vim b/tests/test_procedures_vimscript/textobject.vim new file mode 100644 index 00000000..cee9f985 --- /dev/null +++ b/tests/test_procedures_vimscript/textobject.vim @@ -0,0 +1,29 @@ +" Load sample python file. +" With 'def'. +execute "normal! idef func1():\ a = 1\" +execute "normal! idef func2():\ b = 2" +normal 3ggdaMggf(P + +" Assert changes. +let content=getline('^', '$') +call assert_true(content == ['def func2():', ' b = 2', 'def func1():', ' a = 1']) + + +" Clean file. +%delete + +" With 'class'. +execute "normal! iclass Class1():\ a = 1\" +execute "normal! iclass Class2():\ b = 2\" +normal 3ggdaCggf(P + +" Assert changes. +let content=getline('^', '$') +call assert_true(content == ['class Class2():', ' b = 2', '', 'class Class1():', ' a = 1']) + + +if len(v:errors) > 0 + cquit! +else + quit! 
+endif From ff89053fbec653db7587f21634c63955dd00517d Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sun, 26 Apr 2020 09:29:25 +1000 Subject: [PATCH 33/81] Add to AUTHORS --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 926c2737..6c2c6b95 100644 --- a/AUTHORS +++ b/AUTHORS @@ -42,6 +42,7 @@ Contributors: * Kurtis Rader (https://github.com/krader1961); * Lawrence Akka (https://github.com/lawrenceakka); * lee (https://github.com/loyalpartner); +* Lie Ryan (https://github.com/lieryan/); * Lowe Thiderman (http://github.com/thiderman); * Martin Brochhaus (http://github.com/mbrochh); * Matt Dodge (https://github.com/mattdodge); From eda94e24d7d7df47a43eeb8fc60f2e5136a2cc83 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Tue, 21 Apr 2020 10:14:15 -0300 Subject: [PATCH 34/81] Updating submodules astroid, autopep8, mccabe, pyflakes and pylint --- submodules/astroid | 2 +- submodules/autopep8 | 2 +- submodules/mccabe | 2 +- submodules/pyflakes | 2 +- submodules/pylint | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/submodules/astroid b/submodules/astroid index ace7b296..6c8bfb4b 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit ace7b2967ea762ec43fc7be8ab9c8007564d9be2 +Subproject commit 6c8bfb4b7fb5366a3facfc08020c68ac426b9a8d diff --git a/submodules/autopep8 b/submodules/autopep8 index 107e29dc..84cb34ca 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit 107e29dce22c7b367a36633a78735278e4ad4288 +Subproject commit 84cb34cad9b3e6fe520cb5b355eb52e6b6dd5758 diff --git a/submodules/mccabe b/submodules/mccabe index f318ade8..e92e9e79 160000 --- a/submodules/mccabe +++ b/submodules/mccabe @@ -1 +1 @@ -Subproject commit f318ade8d139a3412c29bf992f447f1f1f8b3d83 +Subproject commit e92e9e79799c5796f76f3da821dbb5aa56e41028 diff --git a/submodules/pyflakes b/submodules/pyflakes index 6501af45..c72d6cf1 160000 --- a/submodules/pyflakes +++ b/submodules/pyflakes @@ -1 +1 @@ -Subproject commit 6501af45203dfa3e2d422cfb3ebbecff853db47f +Subproject commit c72d6cf1a9a119c1dd7a7674f36da21aea32d828 diff --git a/submodules/pylint b/submodules/pylint index d0a597b3..089f5106 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit d0a597b34a0e39a7dd64cdf685f3147f147f52a4 +Subproject commit 089f510623e468c60b2f44365fec3db942591d3d From 6a177993b5f9048fff8e05647dabf5851b596d6d Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 16 May 2020 14:32:32 -0300 Subject: [PATCH 35/81] Update warning message --- ftplugin/python/pymode.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index c13aff71..a1370669 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -5,7 +5,7 @@ endif if g:pymode_python == 'disable' if g:pymode_warning - call pymode#error("Pymode requires vim compiled with +python. Most of features will be disabled.") + call pymode#error("Pymode requires vim compiled with +python3 (exclusively). 
Most of features will be disabled.") endif finish From aaade0272485bac7fdfd151e209ce2c684fa1919 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 16 May 2020 14:35:14 -0300 Subject: [PATCH 36/81] Updating submodules astroid, pycodestyle, pylint and rope --- submodules/astroid | 2 +- submodules/pycodestyle | 2 +- submodules/pylint | 2 +- submodules/rope | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/submodules/astroid b/submodules/astroid index 6c8bfb4b..a672051f 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit 6c8bfb4b7fb5366a3facfc08020c68ac426b9a8d +Subproject commit a672051f4406da0beb01e88767e8e46488eb71eb diff --git a/submodules/pycodestyle b/submodules/pycodestyle index e91ef6e4..d4143838 160000 --- a/submodules/pycodestyle +++ b/submodules/pycodestyle @@ -1 +1 @@ -Subproject commit e91ef6e40f2be30f9af7a86a84255d6bdfe23f51 +Subproject commit d414383860c483c57d1fafc12c630b46a5616d3c diff --git a/submodules/pylint b/submodules/pylint index 089f5106..6c6ffc30 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit 089f510623e468c60b2f44365fec3db942591d3d +Subproject commit 6c6ffc306f9c9614072bcb2c83fefa838bef1102 diff --git a/submodules/rope b/submodules/rope index a1e77083..bc6908a8 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit a1e77083a47370ddc9dcd7707c76ddb12c47a323 +Subproject commit bc6908a82b6f0eb7bb248b600adb3367f42714da From b6c2a3f6515474bbd21a1224587d4ca3d5394069 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 28 May 2020 20:48:20 -0300 Subject: [PATCH 37/81] =?UTF-8?q?Bump=20version:=200.10.0=20=E2=86=92=200.?= =?UTF-8?q?11.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 3 +-- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 344a1f03..9a13d8b0 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,6 @@ [bumpversion] commit = True -current_version = 0.10.0 +current_version = 0.11.0 files = plugin/pymode.vim tag = True tag_name = {new_version} @@ -8,4 +8,3 @@ tag_name = {new_version} [bumpversion:file:doc/pymode.txt] search = Version: {current_version} replace = Version: {new_version} - diff --git a/doc/pymode.txt b/doc/pymode.txt index 2e2b7f98..de98f17b 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.10.0 + Version: 0.11.0 =============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 67216a07..1be44ad8 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.10.0" +let g:pymode_version = "0.11.0" " Enable pymode by default :) call pymode#default('g:pymode', 1) From 2da50daffe1ac0dd20f4783a569f118cf7630942 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 28 May 2020 21:29:16 -0300 Subject: [PATCH 38/81] Add submodule info in debug messages --- autoload/pymode/debug.vim | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/debug.vim b/autoload/pymode/debug.vim index cf139b07..2be5149c 100644 --- a/autoload/pymode/debug.vim +++ b/autoload/pymode/debug.vim @@ -30,7 +30,7 @@ fun! 
pymode#debug#sysinfo() "{{{ echom pymodevar endfor " }}} - " Github commit info. {{{ + " Git commit info. {{{ " Find in the scriptnames the first occurence of 'python-mode'. Then parse " the result outputting its path. This is in turn fed into the git command. call pymode#debug("Git commit: ") @@ -44,6 +44,13 @@ fun! pymode#debug#sysinfo() "{{{ let l:git_head_sha1 = system('git -C ' . expand(l:pymode_folder). ' rev-parse HEAD ' ) echom join(filter(split(l:git_head_sha1, '\zs'), 'v:val =~? "[0-9A-Fa-f]"'), '') " }}} + " Git submodules status. {{{ + call pymode#debug("Git submodule status:") + let l:git_submodule_status = system('git -C ' . expand(l:pymode_folder). ' submodule status') + for submodule in split(l:git_submodule_status, '\n') + echom submodule + endfor + " }}} call pymode#debug("End of pymode#debug#sysinfo") endfunction "}}} From 0c413a7a8b8649f1cc59a959b92fa88cd88357d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filip=20Pobo=C5=99il?= Date: Fri, 5 Jun 2020 20:20:37 +0200 Subject: [PATCH 39/81] Add builtin breakpoint (PEP 553) Python 3.7 introduced new builtin function `breakpoint()` (PEP 553) that can be used to insert breakpoints. --- autoload/pymode/breakpoint.vim | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index 2692ca34..98639b57 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -11,10 +11,14 @@ fun! pymode#breakpoint#init() "{{{ from importlib.util import find_spec -for module in ('wdb', 'pudb', 'ipdb', 'pdb'): - if find_spec(module): - vim.command('let g:pymode_breakpoint_cmd = "import %s; %s.set_trace() # XXX BREAKPOINT"' % (module, module)) - break +if sys.version_info >= (3, 7): + vim.command('let g:pymode_breakpoint_cmd = "breakpoint()"') + +else: + for module in ('wdb', 'pudb', 'ipdb', 'pdb'): + if find_spec(module): + vim.command('let g:pymode_breakpoint_cmd = "import %s; %s.set_trace() # XXX BREAKPOINT"' % (module, module)) + break EOF endif From 65407b09b5a0748af2fc15999d5449377db352b3 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 8 Oct 2020 14:35:35 -0300 Subject: [PATCH 40/81] Update submodules --- submodules/astroid | 2 +- submodules/autopep8 | 2 +- submodules/mccabe | 2 +- submodules/pydocstyle | 2 +- submodules/pylint | 2 +- submodules/rope | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/submodules/astroid b/submodules/astroid index a672051f..2d25e845 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit a672051f4406da0beb01e88767e8e46488eb71eb +Subproject commit 2d25e84587c3e392751280490355aaeda7afd116 diff --git a/submodules/autopep8 b/submodules/autopep8 index 84cb34ca..972093da 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit 84cb34cad9b3e6fe520cb5b355eb52e6b6dd5758 +Subproject commit 972093dad2021ca5214133864af3cd0595830a94 diff --git a/submodules/mccabe b/submodules/mccabe index e92e9e79..535e2c5d 160000 --- a/submodules/mccabe +++ b/submodules/mccabe @@ -1 +1 @@ -Subproject commit e92e9e79799c5796f76f3da821dbb5aa56e41028 +Subproject commit 535e2c5dc8cb9ea8afe79fc4cae5386c20d57394 diff --git a/submodules/pydocstyle b/submodules/pydocstyle index 59396eb5..33244595 160000 --- a/submodules/pydocstyle +++ b/submodules/pydocstyle @@ -1 +1 @@ -Subproject commit 59396eb50d1d1a59fdccdd71cf4031577c02ab54 +Subproject commit 3324459514ddb048fc919ab2ed1f52471b801ab0 diff --git a/submodules/pylint b/submodules/pylint index 
6c6ffc30..8197144d 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit 6c6ffc306f9c9614072bcb2c83fefa838bef1102 +Subproject commit 8197144d82469ed39d1f4f3c345efee198ec9212 diff --git a/submodules/rope b/submodules/rope index bc6908a8..939dd974 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit bc6908a82b6f0eb7bb248b600adb3367f42714da +Subproject commit 939dd974835479495cc30710208f7c4a566dd38b From 59efb15bc90fbadc5e5c3cd1bcd7b3be54dcbacd Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 8 Oct 2020 14:41:58 -0300 Subject: [PATCH 41/81] =?UTF-8?q?Bump=20version:=200.11.0=20=E2=86=92=200.?= =?UTF-8?q?12.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9a13d8b0..0e0e6153 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,6 @@ [bumpversion] commit = True -current_version = 0.11.0 +current_version = 0.12.0 files = plugin/pymode.vim tag = True tag_name = {new_version} diff --git a/doc/pymode.txt b/doc/pymode.txt index de98f17b..b7ec7be1 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.11.0 + Version: 0.12.0 =============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 1be44ad8..f71a138a 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.11.0" +let g:pymode_version = "0.12.0" " Enable pymode by default :) call pymode#default('g:pymode', 1) From 50cd2cc5dbe58341a44e7894dd464910ba326a44 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 8 Oct 2020 15:46:07 -0300 Subject: [PATCH 42/81] Add toml submodule This is a new dependency for autopep8 --- .gitmodules | 3 +++ pymode/libs/toml | 1 + submodules/toml | 1 + 3 files changed, 5 insertions(+) create mode 120000 pymode/libs/toml create mode 160000 submodules/toml diff --git a/.gitmodules b/.gitmodules index 15a5dd75..4874edc5 100644 --- a/.gitmodules +++ b/.gitmodules @@ -45,3 +45,6 @@ path = submodules/pylama url = https://github.com/klen/pylama shallow = true +[submodule "submodules/toml"] + path = submodules/toml + url = https://github.com/uiri/toml.git diff --git a/pymode/libs/toml b/pymode/libs/toml new file mode 120000 index 00000000..dc960a0a --- /dev/null +++ b/pymode/libs/toml @@ -0,0 +1 @@ +../../submodules/toml/toml \ No newline at end of file diff --git a/submodules/toml b/submodules/toml new file mode 160000 index 00000000..a86fc1fb --- /dev/null +++ b/submodules/toml @@ -0,0 +1 @@ +Subproject commit a86fc1fbd650a19eba313c3f642c9e2c679dc8d6 From 6316b01b5de9aa10f2fac21ffcd3b48d924aea0e Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Thu, 8 Oct 2020 15:49:15 -0300 Subject: [PATCH 43/81] =?UTF-8?q?Bump=20version:=200.12.0=20=E2=86=92=200.?= =?UTF-8?q?13.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0e0e6153..613addba 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,6 @@ [bumpversion] commit = True -current_version 
= 0.12.0 +current_version = 0.13.0 files = plugin/pymode.vim tag = True tag_name = {new_version} diff --git a/doc/pymode.txt b/doc/pymode.txt index b7ec7be1..73660a61 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.12.0 + Version: 0.13.0 =============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index f71a138a..e69f9746 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.12.0" +let g:pymode_version = "0.13.0" " Enable pymode by default :) call pymode#default('g:pymode', 1) From aad4aefe013660467c434e51d3f225a00b37820a Mon Sep 17 00:00:00 2001 From: Derek Croote Date: Sun, 1 Nov 2020 16:01:09 -0800 Subject: [PATCH 44/81] Update build status badge from Travis to Actions --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 47660e45..49b30ea9 100644 --- a/readme.md +++ b/readme.md @@ -1,4 +1,4 @@ -[![Build Status](https://travis-ci.org/python-mode/python-mode.svg?branch=develop)](https://travis-ci.org/python-mode/python-mode) +[![Testing python-mode](https://github.com/python-mode/python-mode/workflows/Testing%20python-mode/badge.svg?branch=develop)](https://github.com/python-mode/python-mode/actions?query=workflow%3A%22Testing+python-mode%22+branch%3Adevelop) ![](https://raw.github.com/python-mode/python-mode/develop/logo.png) # Python-mode, a Python IDE for Vim From 2ebe37e71b2747c062d80c42ec02d1044ad668fa Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 8 May 2021 14:52:12 -0300 Subject: [PATCH 45/81] Updating submodules --- submodules/astroid | 2 +- submodules/autopep8 | 2 +- submodules/mccabe | 2 +- submodules/pycodestyle | 2 +- submodules/pydocstyle | 2 +- submodules/pyflakes | 2 +- submodules/pylint | 2 +- submodules/rope | 2 +- submodules/toml | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/submodules/astroid b/submodules/astroid index 2d25e845..36dda3fc 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit 2d25e84587c3e392751280490355aaeda7afd116 +Subproject commit 36dda3fc8a5826b19a33a0ff29402b61d6a64fc2 diff --git a/submodules/autopep8 b/submodules/autopep8 index 972093da..32c78a3a 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit 972093dad2021ca5214133864af3cd0595830a94 +Subproject commit 32c78a3a07d7ee35500e6f20bfcd621f3132c42e diff --git a/submodules/mccabe b/submodules/mccabe index 535e2c5d..2d4dd943 160000 --- a/submodules/mccabe +++ b/submodules/mccabe @@ -1 +1 @@ -Subproject commit 535e2c5dc8cb9ea8afe79fc4cae5386c20d57394 +Subproject commit 2d4dd9435fcb05aaa89ba0392a84cb1d30a87dc9 diff --git a/submodules/pycodestyle b/submodules/pycodestyle index d4143838..930e2cad 160000 --- a/submodules/pycodestyle +++ b/submodules/pycodestyle @@ -1 +1 @@ -Subproject commit d414383860c483c57d1fafc12c630b46a5616d3c +Subproject commit 930e2cad15df3661306740c30a892a6f1902ef1d diff --git a/submodules/pydocstyle b/submodules/pydocstyle index 33244595..5f59f6eb 160000 --- a/submodules/pydocstyle +++ b/submodules/pydocstyle @@ -1 +1 @@ -Subproject commit 3324459514ddb048fc919ab2ed1f52471b801ab0 +Subproject commit 5f59f6eba0d8f0168c6ab45ee97485569b861b77 diff --git a/submodules/pyflakes b/submodules/pyflakes index c72d6cf1..95fe313b 160000 --- a/submodules/pyflakes +++ 
b/submodules/pyflakes @@ -1 +1 @@ -Subproject commit c72d6cf1a9a119c1dd7a7674f36da21aea32d828 +Subproject commit 95fe313ba5ca384041472cd171ea60fad910c207 diff --git a/submodules/pylint b/submodules/pylint index 8197144d..3eb0362d 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit 8197144d82469ed39d1f4f3c345efee198ec9212 +Subproject commit 3eb0362dc42642e3e2774d7523a1e73d71394064 diff --git a/submodules/rope b/submodules/rope index 939dd974..f4b19fd8 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit 939dd974835479495cc30710208f7c4a566dd38b +Subproject commit f4b19fd8ccc5325ded9db1c11fe6d25f6082de0c diff --git a/submodules/toml b/submodules/toml index a86fc1fb..3f637dba 160000 --- a/submodules/toml +++ b/submodules/toml @@ -1 +1 @@ -Subproject commit a86fc1fbd650a19eba313c3f642c9e2c679dc8d6 +Subproject commit 3f637dba5f68db63d4b30967fedda51c82459471 From cc29770fa1a1bc9759e50f1b9909e930a5fc0eeb Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 8 May 2021 15:08:41 -0300 Subject: [PATCH 46/81] Update github action/workflow --- .github/workflows/test_pymode.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_pymode.yml b/.github/workflows/test_pymode.yml index 7b6bab70..e7efc4fc 100644 --- a/.github/workflows/test_pymode.yml +++ b/.github/workflows/test_pymode.yml @@ -3,15 +3,15 @@ name: Testing python-mode on: [push] jobs: - test-python-3_6: + test-python-3: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Install dependencies run: | sudo apt update - sudo apt install -yqq libncurses5-dev libatk1.0-dev python-dev python3-dev lua5.1 lua5.1-dev libperl-dev git - sudo apt remove --purge vim vim-runtime gvim + sudo apt install -yqq libncurses5-dev libgtk2.0-dev libatk1.0-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python3-dev lua5.2 liblua5.2-dev libperl-dev git + sudo apt remove --purge -yqq vim vim-runtime gvim - name: build and install vim from source working-directory: /tmp run: | From f867d28caa6daf8e71788819e7eddcb47827d304 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 8 May 2021 15:20:20 -0300 Subject: [PATCH 47/81] Test for python 3.8 and 3.9 --- .github/workflows/test_pymode.yml | 39 +++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_pymode.yml b/.github/workflows/test_pymode.yml index e7efc4fc..332dcdad 100644 --- a/.github/workflows/test_pymode.yml +++ b/.github/workflows/test_pymode.yml @@ -3,21 +3,56 @@ name: Testing python-mode on: [push] jobs: - test-python-3: + test-python-3_8: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Install dependencies run: | sudo apt update + export PYTHON_CONFIGURE_OPTS="--enable-shared" sudo apt install -yqq libncurses5-dev libgtk2.0-dev libatk1.0-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python3-dev lua5.2 liblua5.2-dev libperl-dev git sudo apt remove --purge -yqq vim vim-runtime gvim - name: build and install vim from source working-directory: /tmp run: | + export PYTHON_CONFIGURE_OPTS="--enable-shared" git clone https://github.com/vim/vim.git cd vim - ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.6/config-3.6m-x86_64-linux-gnu --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local + ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes 
--with-python3-config-dir=/usr/lib/python3.8/config-3.8m-x86_64-linux-gnu --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local + sudo make && sudo make install + - name: Install python-mode + run: | + export PYMODE_DIR="${HOME}/work/python-mode/python-mode" + mkdir -p ${HOME}/.vim/pack/foo/start/ + ln -s ${PYMODE_DIR} ${HOME}/.vim/pack/foo/start/python-mode + cp ${PYMODE_DIR}/tests/utils/pymoderc ${HOME}/.pymoderc + cp ${PYMODE_DIR}/tests/utils/vimrc ${HOME}/.vimrc + touch ${HOME}/.vimrc.before ${HOME}/.vimrc.after + - name: Run python-mode test script + run: | + alias python=python3 + cd ${HOME}/work/python-mode/python-mode + git submodule update --init --recursive + git submodule sync + bash tests/test.sh + test-python-3_9: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - name: Install dependencies + run: | + sudo apt update + export PYTHON_CONFIGURE_OPTS="--enable-shared" + sudo apt install -yqq libncurses5-dev libgtk2.0-dev libatk1.0-dev libcairo2-dev libx11-dev libxpm-dev libxt-dev python3-dev lua5.2 liblua5.2-dev libperl-dev git + sudo apt remove --purge -yqq vim vim-runtime gvim + - name: build and install vim from source + working-directory: /tmp + run: | + export PYTHON_CONFIGURE_OPTS="--enable-shared" + git clone https://github.com/vim/vim.git + cd vim + ./configure --with-features=huge --enable-multibyte --enable-python3interp=yes --with-python3-config-dir=/usr/lib/python3.9/config-3.9m-x86_64-linux-gnu --enable-perlinterp=yes --enable-luainterp=yes --enable-cscope --prefix=/usr/local sudo make && sudo make install - name: Install python-mode run: | From 4a0cc9688c013e898308580ae7b4955cabf3f7e6 Mon Sep 17 00:00:00 2001 From: Nathan Pemberton Date: Fri, 9 Jul 2021 09:31:57 -0700 Subject: [PATCH 48/81] Add configurable prefix for rope commands (currently hard-coded to ) --- doc/pymode.txt | 4 ++++ plugin/pymode.vim | 37 ++++++++++++++++++++----------------- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 73660a61..13ed77f1 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -413,6 +413,10 @@ Turn on the rope script *'g:pymode_rope' > let g:pymode_rope = 1 +Set the prefix for rope commands *'g:pymode_rope_prefix'* +> + let g:pymode_rope_refix = '' + .ropeproject Folder ~ *.ropeproject* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e69f9746..0b509735 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,6 +1,7 @@ " vi: fdl=1 let g:pymode_version = "0.13.0" + " Enable pymode by default :) call pymode#default('g:pymode', 1) call pymode#default('g:pymode_debug', 0) @@ -182,6 +183,7 @@ call pymode#default('g:pymode_breakpoint_cmd', '') " " Rope support call pymode#default('g:pymode_rope', 0) +call pymode#default('g:pymode_rope_prefix', '') " System plugin variable if g:pymode_rope @@ -210,7 +212,7 @@ if g:pymode_rope call pymode#default('g:pymode_rope_autoimport_modules', ['os', 'shutil', 'datetime']) " Bind keys to autoimport module for object under cursor - call pymode#default('g:pymode_rope_autoimport_bind', 'ra') + call pymode#default('g:pymode_rope_autoimport_bind', g:pymode_rope_prefix . 
'ra') " Automatic completion on dot call pymode#default('g:pymode_rope_complete_on_dot', 1) @@ -219,56 +221,57 @@ if g:pymode_rope call pymode#default('g:pymode_rope_completion_bind', '') " Bind keys for goto definition (leave empty for disable) - call pymode#default('g:pymode_rope_goto_definition_bind', 'g') + " call pymode#default('g:pymode_rope_goto_definition_bind', g:pymode_rope_prefix . 'g') + call pymode#default('g:pymode_rope_goto_definition_bind', g:pymode_rope_prefix . 'g') " set command for open definition (e, new, vnew) call pymode#default('g:pymode_rope_goto_definition_cmd', 'new') " Bind keys for show documentation (leave empty for disable) - call pymode#default('g:pymode_rope_show_doc_bind', 'd') + call pymode#default('g:pymode_rope_show_doc_bind', g:pymode_rope_prefix . 'd') " Bind keys for find occurencies (leave empty for disable) - call pymode#default('g:pymode_rope_find_it_bind', 'f') + call pymode#default('g:pymode_rope_find_it_bind', g:pymode_rope_prefix . 'f') " Bind keys for organize imports (leave empty for disable) - call pymode#default('g:pymode_rope_organize_imports_bind', 'ro') + call pymode#default('g:pymode_rope_organize_imports_bind', g:pymode_rope_prefix . 'ro') " Bind keys for rename variable/method/class in the project (leave empty for disable) - call pymode#default('g:pymode_rope_rename_bind', 'rr') + call pymode#default('g:pymode_rope_rename_bind', g:pymode_rope_prefix . 'rr') " Bind keys for rename module - call pymode#default('g:pymode_rope_rename_module_bind', 'r1r') + call pymode#default('g:pymode_rope_rename_module_bind', g:pymode_rope_prefix . 'r1r') " Bind keys for convert module to package - call pymode#default('g:pymode_rope_module_to_package_bind', 'r1p') + call pymode#default('g:pymode_rope_module_to_package_bind', g:pymode_rope_prefix . 'r1p') " Creates a new function or method (depending on the context) from the selected lines - call pymode#default('g:pymode_rope_extract_method_bind', 'rm') + call pymode#default('g:pymode_rope_extract_method_bind', g:pymode_rope_prefix . 'rm') " Creates a variable from the selected lines - call pymode#default('g:pymode_rope_extract_variable_bind', 'rl') + call pymode#default('g:pymode_rope_extract_variable_bind', g:pymode_rope_prefix . 'rl') " Inline refactoring - call pymode#default('g:pymode_rope_inline_bind', 'ri') + call pymode#default('g:pymode_rope_inline_bind', g:pymode_rope_prefix . 'ri') " Move refactoring - call pymode#default('g:pymode_rope_move_bind', 'rv') + call pymode#default('g:pymode_rope_move_bind', g:pymode_rope_prefix . 'rv') " Generate function - call pymode#default('g:pymode_rope_generate_function_bind', 'rnf') + call pymode#default('g:pymode_rope_generate_function_bind', g:pymode_rope_prefix . 'rnf') " Generate class - call pymode#default('g:pymode_rope_generate_class_bind', 'rnc') + call pymode#default('g:pymode_rope_generate_class_bind', g:pymode_rope_prefix . 'rnc') " Generate package - call pymode#default('g:pymode_rope_generate_package_bind', 'rnp') + call pymode#default('g:pymode_rope_generate_package_bind', g:pymode_rope_prefix . 'rnp') " Change signature - call pymode#default('g:pymode_rope_change_signature_bind', 'rs') + call pymode#default('g:pymode_rope_change_signature_bind', g:pymode_rope_prefix . 'rs') " Tries to find the places in which a function can be used and changes the " code to call it instead - call pymode#default('g:pymode_rope_use_function_bind', 'ru') + call pymode#default('g:pymode_rope_use_function_bind', g:pymode_rope_prefix . 
'ru') " Regenerate project cache on every save call pymode#default('g:pymode_rope_regenerate_on_write', 1) From 04caeebcc76158ee269b222b6710555406b05284 Mon Sep 17 00:00:00 2001 From: Nathan Pemberton Date: Fri, 9 Jul 2021 09:46:30 -0700 Subject: [PATCH 49/81] Add NathanTP to authors --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 6c2c6b95..a4bcbf28 100644 --- a/AUTHORS +++ b/AUTHORS @@ -75,3 +75,4 @@ Contributors: * Yury A. Kartynnik (https://github.com/kartynnik); * Xiangyu Xu (https://github.com/bkbncn); * Zach Himsel (https://github.com/zhimsel); +* Nathan Pemberton (https://github.com/NathanTP); From 97b72094b336a9549f49203e9fd21ceee8c16468 Mon Sep 17 00:00:00 2001 From: Nathan Pemberton Date: Tue, 13 Jul 2021 09:48:12 -0700 Subject: [PATCH 50/81] Remove extraneous commented line --- plugin/pymode.vim | 1 - 1 file changed, 1 deletion(-) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 0b509735..5dabc6d9 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -221,7 +221,6 @@ if g:pymode_rope call pymode#default('g:pymode_rope_completion_bind', '') " Bind keys for goto definition (leave empty for disable) - " call pymode#default('g:pymode_rope_goto_definition_bind', g:pymode_rope_prefix . 'g') call pymode#default('g:pymode_rope_goto_definition_bind', g:pymode_rope_prefix . 'g') " set command for open definition (e, new, vnew) From 7487b7965b3174c5c7f52e0f0022492089c65e07 Mon Sep 17 00:00:00 2001 From: Jongwook Choi Date: Thu, 12 Aug 2021 04:32:31 -0400 Subject: [PATCH 51/81] Add option g:pymode_indent_hanging_width This is an option for hanging indent size after an open parenthesis in line continuations. It defaults to `&shiftwidth` but can be assigned a different value. For example, hanging indent size can be set to 4 (even if the tabsize or shiftwidth is not 4) as per Google Python Style Guide. --- autoload/pymode/indent.vim | 4 +++- doc/pymode.txt | 10 ++++++++++ plugin/pymode.vim | 3 +++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/indent.vim b/autoload/pymode/indent.vim index efd41f29..e964f378 100644 --- a/autoload/pymode/indent.vim +++ b/autoload/pymode/indent.vim @@ -24,7 +24,9 @@ function! pymode#indent#get_indent(lnum) if closing_paren return indent(parlnum) else - return indent(parlnum) + &shiftwidth + let l:indent_width = (g:pymode_indent_hanging_width > 0 ? + \ g:pymode_indent_hanging_width : &shiftwidth) + return indent(parlnum) + l:indent_width endif else return parcol diff --git a/doc/pymode.txt b/doc/pymode.txt index 73660a61..6d047698 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -170,6 +170,16 @@ Enable pymode indentation *'g:pymode_indent' > let g:pymode_indent = 1 + +Customization: + +Hanging indent size after an open parenthesis or bracket (but nothing after the +parenthesis), when vertical alignment is not used. Defaults to `&shiftwidth`. 
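
As a concrete illustration of the new option (the snippet is made up; only the option name and the fallback to 'shiftwidth' come from the change above): with a hanging width of 4, continuation lines after a bare opening bracket get a fixed four-column indent, while vertical alignment still applies when arguments start on the opening line.

    # let g:pymode_indent_hanging_width = 4
    # Nothing follows the opening bracket, so continuation lines get a hanging
    # indent of exactly 4 columns, independent of 'shiftwidth'.
    total = sum(
        [1, 2, 3],
    )

    # Text follows the opening bracket, so pymode keeps aligning continuation
    # lines with the first element and the hanging-width setting is not used.
    also = sum([1, 2,
                3])
    print(total, also)
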
+ *'g:pymode_indent_hanging_width'* +> + let g:pymode_indent_hanging_width = &shiftwidth + let g:pymode_indent_hanging_width = 4 + ------------------------------------------------------------------------------- 2.3 Python folding ~ *pymode-folding* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e69f9746..9c01a9d5 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -38,6 +38,9 @@ call pymode#default('g:pymode_doc_bind', 'K') " Enable/Disable pymode PEP8 indentation call pymode#default("g:pymode_indent", 1) +" Customize hanging indent size different than &shiftwidth +call pymode#default("g:pymode_indent_hanging_width", -1) + " TODO: currently folding suffers from a bad performance and incorrect " implementation. This feature should be considered experimental. " Enable/disable pymode folding for pyfiles. From 0c47c692fa3f8ae2c360c63d59b91d1beeca4087 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Thu, 2 Sep 2021 22:22:28 +1000 Subject: [PATCH 52/81] Fix MoveRefactoring Global and Module Move refactoring requires that `dest` be a rope module, while Method Move refactoring requires that `dest` be an attribute name. --- pymode/rope.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pymode/rope.py b/pymode/rope.py index ba5f55b2..fc567a12 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -701,6 +701,15 @@ def get_refactor(ctx): offset = None return move.create_move(ctx.project, ctx.resource, offset) + @staticmethod + def get_changes(refactor, input_str, in_hierarchy=False): + with RopeContext() as ctx: + if isinstance(refactor, (move.MoveGlobal, move.MoveModule)): + dest = ctx.project.pycore.find_module(input_str) + else: + dest = input_str + return super(MoveRefactoring, MoveRefactoring).get_changes(refactor, dest, in_hierarchy=in_hierarchy) + class ChangeSignatureRefactoring(Refactoring): From 724d2c4dfeacfdf66e2bde60bac3d4f8bcce76df Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Thu, 2 Sep 2021 23:12:32 +1000 Subject: [PATCH 53/81] Document how to use Global Move refactoring --- doc/pymode.txt | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 6d047698..cf5688ab 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -613,14 +613,24 @@ code to call it instead. let g:pymode_rope_use_function_bind = 'ru' -Move method/fields ~ +Move refactoring ~ *pymode-rope-move* +Moving method/fields + It happens when you perform move refactoring on a method of a class. In this refactoring, a method of a class is moved to the class of one of its attributes. The old method will call the new method. If you want to change all of the occurrences of the old method to use the new method you can inline it afterwards. + +Moving global variable/class/function into another module + +It happens when you perform move refactoring on global variable/class/function. +In this refactoring, the object being refactored will be moved to a destination +module. All references to the object being moved will be updated to point to +the new location. 
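
To make the distinction above concrete, here is a minimal sketch of the destination handling (the helper name is invented; the rope classes and the find_module call are the ones used in the pymode/rope.py change above): global and module moves need a rope module resource as the destination, whereas a method move takes the attribute name as typed.

    from rope.refactor import move

    def resolve_move_destination(ctx, refactor, input_str):
        # Global/module moves resolve the typed name to a module resource in
        # the rope project; method moves pass the attribute name through.
        if isinstance(refactor, (move.MoveGlobal, move.MoveModule)):
            return ctx.project.pycore.find_module(input_str)
        return input_str
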
+ > let g:pymode_rope_move_bind = 'rv' From b70ec576e97ff509dd13981ef74383074e906de8 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Thu, 2 Sep 2021 23:22:32 +1000 Subject: [PATCH 54/81] Document how to use Module Move refactoring --- doc/pymode.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/pymode.txt b/doc/pymode.txt index cf5688ab..33b2bfa8 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -631,6 +631,13 @@ In this refactoring, the object being refactored will be moved to a destination module. All references to the object being moved will be updated to point to the new location. +Moving module variable/class/function into a package + +It happens when you perform move refactoring on a name referencing a module. +In this refactoring, the module being refactored will be moved to a destination +package. All references to the object being moved will be updated to point to +the new location. + > let g:pymode_rope_move_bind = 'rv' From 577f3ed1a1d28679c3d7d52237a65c8b032ef032 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sat, 18 Sep 2021 14:41:18 +1000 Subject: [PATCH 55/81] Update rope submodule --- submodules/rope | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/rope b/submodules/rope index f4b19fd8..4912fec6 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit f4b19fd8ccc5325ded9db1c11fe6d25f6082de0c +Subproject commit 4912fec66b3387a7a5173dca79135d9e5b182128 From 89ae9a1c73a0ec0a90af37fda479f2d68b7f3b86 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Sun, 19 Sep 2021 23:16:09 +1000 Subject: [PATCH 56/81] Update rope submodule to 0.20.1 --- submodules/rope | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/rope b/submodules/rope index 4912fec6..62af070a 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit 4912fec66b3387a7a5173dca79135d9e5b182128 +Subproject commit 62af070aa5ed3505a2629a76778003ce7fd383f0 From 6c51814bfe723d0bfb2994f4ebd728026f944785 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Mon, 20 Sep 2021 03:10:54 +1000 Subject: [PATCH 57/81] Refactor extract get_code_actions() --- pymode/rope.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index ba5f55b2..c15a0018 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -463,10 +463,11 @@ def run(self): if not input_str: return False + code_actions = self.get_code_actions() action = env.user_input_choices( - 'Choose what to do:', 'perform', 'preview', - 'perform in class hierarchy', - 'preview in class hierarchy') + 'Choose what to do:', + *code_actions, + ) in_hierarchy = action.endswith("in class hierarchy") @@ -492,6 +493,14 @@ def run(self): except Exception as e: # noqa env.error('Unhandled exception in Pymode: %s' % e) + def get_code_actions(self): + return [ + 'perform', + 'preview', + 'perform in class hierarchy', + 'preview in class hierarchy', + ] + @staticmethod def get_refactor(ctx): """ Get refactor object. 
""" From 41ed4df22ba41ed01d15b19bfe2ade0770b95cbb Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Mon, 20 Sep 2021 03:37:31 +1000 Subject: [PATCH 58/81] Don't present irrelevant in_hierarchy options --- pymode/rope.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index c15a0018..3c0d16ac 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -497,8 +497,6 @@ def get_code_actions(self): return [ 'perform', 'preview', - 'perform in class hierarchy', - 'preview in class hierarchy', ] @staticmethod @@ -555,6 +553,14 @@ def get_input_str(self, refactor, ctx): return newname + def get_code_actions(self): + return [ + 'perform', + 'preview', + 'perform in class hierarchy', + 'preview in class hierarchy', + ] + @staticmethod def get_changes(refactor, input_str, in_hierarchy=False): """ Get changes. @@ -710,6 +716,14 @@ def get_refactor(ctx): offset = None return move.create_move(ctx.project, ctx.resource, offset) + def get_code_actions(self): + return [ + 'perform', + 'preview', + 'perform in class hierarchy', + 'preview in class hierarchy', + ] + class ChangeSignatureRefactoring(Refactoring): @@ -737,6 +751,14 @@ def get_refactor(ctx): return change_signature.ChangeSignature( ctx.project, ctx.resource, offset) + def get_code_actions(self): + return [ + 'perform', + 'preview', + 'perform in class hierarchy', + 'preview in class hierarchy', + ] + def get_changes(self, refactor, input_string, in_hierarchy=False): """ Function description. From f78ff46ddd241a1b605144b0a1bcfa9ea49ef64b Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Fri, 1 Oct 2021 09:41:20 +1000 Subject: [PATCH 59/81] Implement select logical line --- after/ftplugin/python.vim | 2 ++ autoload/pymode/rope.vim | 4 ++++ pymode/environment.py | 3 +++ pymode/rope.py | 14 +++++++++++++- 4 files changed, 22 insertions(+), 1 deletion(-) diff --git a/after/ftplugin/python.vim b/after/ftplugin/python.vim index 0fdd01a3..6b5a8839 100644 --- a/after/ftplugin/python.vim +++ b/after/ftplugin/python.vim @@ -42,6 +42,8 @@ if g:pymode_motion vnoremap aM :call pymode#motion#select('^s*(asyncs+)=@', '^s*(asyncs+)=defs', 0) vnoremap iM :call pymode#motion#select('^s*(asyncs+)=@', '^s*(asyncs+)=defs', 1) + onoremap V :call pymode#rope#select_logical_line() + endif if g:pymode_rope && g:pymode_rope_completion diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim index c1a2de0c..36344d0a 100644 --- a/autoload/pymode/rope.vim +++ b/autoload/pymode/rope.vim @@ -194,3 +194,7 @@ fun! pymode#rope#generate_package() "{{{ endif PymodePython rope.GenerateElementRefactoring('package').run() endfunction "}}} + +fun! 
pymode#rope#select_logical_line() "{{{ + PymodePython rope.select_logical_line() +endfunction "}}} diff --git a/pymode/environment.py b/pymode/environment.py index 30ae0e50..86527f56 100644 --- a/pymode/environment.py +++ b/pymode/environment.py @@ -242,5 +242,8 @@ def goto_buffer(bufnr): if str(bufnr) != '-1': vim.command('buffer %s' % bufnr) + def select_line(self, start, end): + vim.command('normal %sggV%sgg' % (start, end)) + env = VimPymodeEnviroment() diff --git a/pymode/rope.py b/pymode/rope.py index ba5f55b2..c44dcc30 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -5,7 +5,7 @@ import site import sys -from rope.base import project, libutils, exceptions, change, worder, pycore +from rope.base import project, libutils, exceptions, change, worder, pycore, codeanalyze from rope.base.fscommands import FileSystemCommands # noqa from rope.base.taskhandle import TaskHandle # noqa from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa @@ -921,6 +921,18 @@ def _insert_import(name, module, ctx): reload_changes(changes) +@env.catch_exceptions +def select_logical_line(): + source, offset = env.get_offset_params() + + lines = codeanalyze.SourceLinesAdapter(source) + lineno = lines.get_line_number(offset) + line_finder = codeanalyze.LogicalLineFinder(lines) + start, end = line_finder.logical_line_in(lineno) + + env.select_line(start, end) + + # Monkey patch Rope def find_source_folders(self, folder): """Look only python files an packages.""" From c47e6dbc55abf1d52cc61ac600a333ade3ba0f51 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Fri, 1 Oct 2021 10:17:48 +1000 Subject: [PATCH 60/81] Fix documentation class and function text objects don't work in normal mode --- doc/pymode.txt | 8 ++++---- pymode/rope.py | 10 +++++++--- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index 6d047698..8b824618 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -209,10 +209,10 @@ Key Command ]] Jump to next class or function (normal, visual, operator modes) [M Jump to previous class or method (normal, visual, operator modes) ]M Jump to next class or method (normal, visual, operator modes) -aC Select a class. Ex: vaC, daC, yaC, caC (normal, operator modes) -iC Select inner class. Ex: viC, diC, yiC, ciC (normal, operator modes) -aM Select a function or method. Ex: vaM, daM, yaM, caM (normal, operator modes) -iM Select inner function or method. Ex: viM, diM, yiM, ciM (normal, operator modes) +aC Select a class. Ex: vaC, daC, yaC, caC (operator modes) +iC Select inner class. Ex: viC, diC, yiC, ciC (operator modes) +aM Select a function or method. Ex: vaM, daM, yaM, caM (operator modes) +iM Select inner function or method. 
Ex: viM, diM, yiM, ciM (operator modes) ==== ============================ Enable pymode-motion *'g:pymode_motion'* diff --git a/pymode/rope.py b/pymode/rope.py index c44dcc30..3f77f839 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -924,13 +924,17 @@ def _insert_import(name, module, ctx): @env.catch_exceptions def select_logical_line(): source, offset = env.get_offset_params() + count = int(env.var('v:count1')) lines = codeanalyze.SourceLinesAdapter(source) - lineno = lines.get_line_number(offset) + start_line = lines.get_line_number(offset) line_finder = codeanalyze.LogicalLineFinder(lines) - start, end = line_finder.logical_line_in(lineno) - env.select_line(start, end) + start_lineno, _ = line_finder.logical_line_in(start_line) + for _, (_, end_lineno) in zip(range(count), line_finder.generate_regions(start_line)): + pass + + env.select_line(start_lineno, end_lineno) # Monkey patch Rope From a50403cb81c71493bc5c77f5270c87372de2a93b Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Fri, 1 Oct 2021 10:18:42 +1000 Subject: [PATCH 61/81] Document logical line selection --- doc/pymode.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/pymode.txt b/doc/pymode.txt index 8b824618..3a9e9035 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -213,6 +213,7 @@ aC Select a class. Ex: vaC, daC, yaC, caC (operator modes) iC Select inner class. Ex: viC, diC, yiC, ciC (operator modes) aM Select a function or method. Ex: vaM, daM, yaM, caM (operator modes) iM Select inner function or method. Ex: viM, diM, yiM, ciM (operator modes) +V Select logical line. Ex: dV, yV, cV (operator modes), also works with count ==== ============================ Enable pymode-motion *'g:pymode_motion'* From 9b4a338a8876c119f3798696f64910882757766f Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Fri, 1 Oct 2021 12:25:58 +1000 Subject: [PATCH 62/81] Fix logical line off by one --- pymode/rope.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pymode/rope.py b/pymode/rope.py index 3f77f839..4694fe1f 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -930,8 +930,8 @@ def select_logical_line(): start_line = lines.get_line_number(offset) line_finder = codeanalyze.LogicalLineFinder(lines) - start_lineno, _ = line_finder.logical_line_in(start_line) - for _, (_, end_lineno) in zip(range(count), line_finder.generate_regions(start_line)): + start_lineno, end_lineno = line_finder.logical_line_in(start_line) + for _, (_, end_lineno) in zip(range(count - 1), line_finder.generate_regions(start_line)): pass env.select_line(start_lineno, end_lineno) From 0beb5f8538901e4f300afccc91ec87a3a8e20f57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filip=20Pobo=C5=99il?= Date: Sun, 14 Nov 2021 15:15:52 +0100 Subject: [PATCH 63/81] Add syntax highlight for walrus operator `:=` operator (PEP 572) --- doc/pymode.txt | 4 ++++ syntax/python.vim | 7 +++++++ 2 files changed, 11 insertions(+) diff --git a/doc/pymode.txt b/doc/pymode.txt index 9991ab21..4e565879 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -674,6 +674,10 @@ Highlight '=' operator *'g:pymode_syntax_highlight_equal_operator' > let g:pymode_syntax_highlight_equal_operator = g:pymode_syntax_all +Highlight ':=' operator *'g:pymode_syntax_highlight_walrus_operator'* +> + let g:pymode_syntax_highlight_walrus_operator = g:pymode_syntax_all + Highlight '*' operator *'g:pymode_syntax_highlight_stars_operator'* > let g:pymode_syntax_highlight_stars_operator = g:pymode_syntax_all diff --git a/syntax/python.vim b/syntax/python.vim index b7666d86..70bcfa0c 100644 --- 
a/syntax/python.vim +++ b/syntax/python.vim @@ -23,6 +23,9 @@ call pymode#default("g:pymode_syntax_highlight_async_await", g:pymode_syntax_all " Highlight '=' operator call pymode#default('g:pymode_syntax_highlight_equal_operator', g:pymode_syntax_all) +" Highlight ':=' operator +call pymode#default('g:pymode_syntax_highlight_walrus_operator', g:pymode_syntax_all) + " Highlight '*' operator call pymode#default('g:pymode_syntax_highlight_stars_operator', g:pymode_syntax_all) @@ -114,6 +117,10 @@ endif syn match pythonExtraOperator "\%(=\)" endif + if g:pymode_syntax_highlight_walrus_operator + syn match pythonExtraOperator "\%(:=\)" + endif + if g:pymode_syntax_highlight_stars_operator syn match pythonExtraOperator "\%(\*\|\*\*\)" endif From 000232fc4514c68e7a4f4bb6cae0f8282a06ebc5 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Tue, 7 Dec 2021 07:58:19 +1100 Subject: [PATCH 64/81] Add tests for logical line text object --- .../test_procedures_vimscript/textobject.vim | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/tests/test_procedures_vimscript/textobject.vim b/tests/test_procedures_vimscript/textobject.vim index cee9f985..cbd4ef05 100644 --- a/tests/test_procedures_vimscript/textobject.vim +++ b/tests/test_procedures_vimscript/textobject.vim @@ -1,3 +1,6 @@ +set noautoindent +let g:pymode_rope=1 + " Load sample python file. " With 'def'. execute "normal! idef func1():\ a = 1\" @@ -22,6 +25,57 @@ let content=getline('^', '$') call assert_true(content == ['class Class2():', ' b = 2', '', 'class Class1():', ' a = 1']) +" Clean file. +%delete + +" With 'def'. +execute "normal! iprint(\ 1\)\" +execute "normal! iprint(\ 2\)\" +execute "normal! iprint(\ 3\)\" +normal 4ggdV + +let content=getline('^', '$') +call assert_true(content == [ +\ "print(", " 1", ")", +\ "print(", " 3", ")", +\ "" +\]) + + +" Clean file. +%delete + +" With 'def'. +execute "normal! iprint(\ 1\)\" +execute "normal! iprint(\ 2\)\" +execute "normal! iprint(\ 3\)\" +execute "normal! iprint(\ 4\)\" +normal 5ggd2V + +let content=getline('^', '$') +call assert_true(content == [ +\ "print(", " 1", ")", +\ "print(", " 4", ")", +\ "" +\]) + +" Clean file. +%delete + +" With 'def'. +execute "normal! iprint(\ 1\)\" +execute "normal! iprint(\ 2\)\" +execute "normal! iprint(\ 3\)\" +execute "normal! iprint(\ 4\)\" +normal 5ggd2V + +let content=getline('^', '$') +call assert_true(content == [ +\ "print(", " 1", ")", +\ "print(", " 4", ")", +\ "" +\]) + if len(v:errors) > 0 cquit! 
else From 7c5731dc16ab4e4369aa08560d609a9d08022988 Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Tue, 7 Dec 2021 07:58:59 +1100 Subject: [PATCH 65/81] Fix logical line text object when used with count --- pymode/rope.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymode/rope.py b/pymode/rope.py index 4694fe1f..c34817b2 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -931,7 +931,7 @@ def select_logical_line(): line_finder = codeanalyze.LogicalLineFinder(lines) start_lineno, end_lineno = line_finder.logical_line_in(start_line) - for _, (_, end_lineno) in zip(range(count - 1), line_finder.generate_regions(start_line)): + for _, (_, end_lineno) in zip(range(count), line_finder.generate_regions(start_lineno)): pass env.select_line(start_lineno, end_lineno) From d090c736ace5bbaaa82b4f123662b36f3e7f3cb3 Mon Sep 17 00:00:00 2001 From: Neil Girdhar Date: Sun, 5 Dec 2021 12:15:05 -0500 Subject: [PATCH 66/81] Remove dead keywords and builtins; add match, case --- syntax/python.vim | 55 +++++++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/syntax/python.vim b/syntax/python.vim index 70bcfa0c..5a76d2b0 100644 --- a/syntax/python.vim +++ b/syntax/python.vim @@ -94,7 +94,7 @@ endif syn match pythonClassParameters "[^,\*]*" contained contains=pythonBuiltin,pythonBuiltinObj,pythonBuiltinType,pythonExtraOperatorpythonStatement,pythonBrackets,pythonString,pythonComment skipwhite syn keyword pythonRepeat for while - syn keyword pythonConditional if elif else + syn keyword pythonConditional if elif else match case syn keyword pythonInclude import from syn keyword pythonException try except finally syn keyword pythonOperator and in is not or @@ -269,26 +269,29 @@ endif " Builtin objects and types if g:pymode_syntax_builtin_objs - syn keyword pythonBuiltinObj True False Ellipsis None NotImplemented - syn keyword pythonBuiltinObj __debug__ __doc__ __file__ __name__ __package__ + " True, False, Ellipsis, and None are in fact keywords. 
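
For reference, a short self-contained snippet exercising the constructs affected by this and the preceding syntax patch (the code itself is illustrative): ':=' is matched by the new pythonExtraOperator rule and 'match'/'case' are now highlighted as conditionals.

    import random

    # ':=' (PEP 572) is matched when g:pymode_syntax_highlight_walrus_operator
    # is enabled.
    while (roll := random.randint(1, 6)) != 6:
        print("rolled", roll)

    # 'match' and 'case' (Python 3.10) are now listed under pythonConditional.
    match ("load", "data.txt"):
        case ("load", filename):
            print("loading", filename)
        case _:
            print("unknown command")
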
+ syn keyword pythonBuiltinObj True False Ellipsis None + syn keyword pythonBuiltinObj NotImplemented + syn keyword pythonBuiltinObj __debug__ __doc__ __file__ __name__ __package__ __loader__ + syn keyword pythonBuiltinObj __spec__ __cached__ __annotations__ endif if g:pymode_syntax_builtin_types syn keyword pythonBuiltinType type object - syn keyword pythonBuiltinType str basestring unicode buffer bytearray bytes chr unichr - syn keyword pythonBuiltinType dict int long bool float complex set frozenset list tuple - syn keyword pythonBuiltinType file super + syn keyword pythonBuiltinType str bytearray bytes chr + syn keyword pythonBuiltinType dict int bool float complex set frozenset list tuple + syn keyword pythonBuiltinType super endif " Builtin functions if g:pymode_syntax_builtin_funcs - syn keyword pythonBuiltinFunc __import__ abs all any apply - syn keyword pythonBuiltinFunc bin callable classmethod cmp coerce compile + syn keyword pythonBuiltinFunc __import__ abs all any + syn keyword pythonBuiltinFunc bin callable classmethod compile syn keyword pythonBuiltinFunc delattr dir divmod enumerate eval execfile filter syn keyword pythonBuiltinFunc format getattr globals locals hasattr hash help hex id - syn keyword pythonBuiltinFunc input intern isinstance issubclass iter len map max min - syn keyword pythonBuiltinFunc next oct open ord pow property range xrange - syn keyword pythonBuiltinFunc raw_input reduce reload repr reversed round setattr + syn keyword pythonBuiltinFunc input isinstance issubclass iter len map max min + syn keyword pythonBuiltinFunc next oct open ord pow property range + syn keyword pythonBuiltinFunc repr reversed round setattr syn keyword pythonBuiltinFunc slice sorted staticmethod sum vars zip if g:pymode_syntax_print_as_function @@ -299,31 +302,31 @@ endif " Builtin exceptions and warnings if g:pymode_syntax_highlight_exceptions - syn keyword pythonExClass BaseException - syn keyword pythonExClass Exception StandardError ArithmeticError - syn keyword pythonExClass LookupError EnvironmentError - syn keyword pythonExClass AssertionError AttributeError BufferError EOFError - syn keyword pythonExClass FloatingPointError GeneratorExit IOError - syn keyword pythonExClass ImportError IndexError KeyError - syn keyword pythonExClass KeyboardInterrupt MemoryError NameError + syn keyword pythonExClass BaseException Exception ArithmeticError + syn keyword pythonExClass BufferError LookupError + syn keyword pythonExClass AssertionError AttributeError EOFError + syn keyword pythonExClass FloatingPointError GeneratorExit + syn keyword pythonExClass ImportError ModuleNotFoundError IndexError + syn keyword pythonExClass KeyError KeyboardInterrupt MemoryError NameError syn keyword pythonExClass NotImplementedError OSError OverflowError - syn keyword pythonExClass ReferenceError RuntimeError StopIteration - syn keyword pythonExClass SyntaxError IndentationError TabError + syn keyword pythonExClass RecursionError ReferenceError RuntimeError StopIteration + syn keyword pythonExClass StopAsyncIteration SyntaxError IndentationError TabError syn keyword pythonExClass SystemError SystemExit TypeError syn keyword pythonExClass UnboundLocalError UnicodeError syn keyword pythonExClass UnicodeEncodeError UnicodeDecodeError - syn keyword pythonExClass UnicodeTranslateError ValueError VMSError + syn keyword pythonExClass UnicodeTranslateError ValueError + syn keyword pythonExClass ZeroDivisionError EnvironmentError IOError + syn keyword pythonExClass WindowsError syn keyword pythonExClass 
BlockingIOError ChildProcessError ConnectionError syn keyword pythonExClass BrokenPipeError ConnectionAbortedError syn keyword pythonExClass ConnectionRefusedError ConnectionResetError syn keyword pythonExClass FileExistsError FileNotFoundError InterruptedError syn keyword pythonExClass IsADirectoryError NotADirectoryError PermissionError syn keyword pythonExClass ProcessLookupError TimeoutError - syn keyword pythonExClass WindowsError ZeroDivisionError - syn keyword pythonExClass Warning UserWarning BytesWarning DeprecationWarning - syn keyword pythonExClass PendingDepricationWarning SyntaxWarning - syn keyword pythonExClass RuntimeWarning FutureWarning - syn keyword pythonExClass ImportWarning UnicodeWarning + syn keyword pythonExClass Warning UserWarning DeprecationWarning PendingDeprecationWarning + syn keyword pythonExClass SyntaxWarning RuntimeWarning FutureWarning + syn keyword pythonExClass ImportWarning UnicodeWarning EncodingWarning + syn keyword pythonExClass BytesWarning ResourceWarning endif " }}} From 3dc75c2a97e729dc89a9ea3c4b61a9097d1810bf Mon Sep 17 00:00:00 2001 From: Lie Ryan Date: Fri, 25 Nov 2022 17:05:13 +1100 Subject: [PATCH 67/81] Update rope==1.5.1 --- .gitmodules | 9 +++++++++ pymode/libs/appdirs.py | 1 + pymode/libs/pytoolconfig | 1 + pymode/libs/tomli | 1 + submodules/appdirs | 1 + submodules/pytoolconfig | 1 + submodules/rope | 2 +- submodules/tomli | 1 + 8 files changed, 16 insertions(+), 1 deletion(-) create mode 120000 pymode/libs/appdirs.py create mode 120000 pymode/libs/pytoolconfig create mode 120000 pymode/libs/tomli create mode 160000 submodules/appdirs create mode 160000 submodules/pytoolconfig create mode 160000 submodules/tomli diff --git a/.gitmodules b/.gitmodules index 4874edc5..1ef5f423 100644 --- a/.gitmodules +++ b/.gitmodules @@ -48,3 +48,12 @@ [submodule "submodules/toml"] path = submodules/toml url = https://github.com/uiri/toml.git +[submodule "submodules/pytoolconfig"] + path = submodules/pytoolconfig + url = git@github.com:bagel897/pytoolconfig.git +[submodule "submodules/tomli"] + path = submodules/tomli + url = git@github.com:hukkin/tomli.git +[submodule "submodules/appdirs"] + path = submodules/appdirs + url = git@github.com:ActiveState/appdirs.git diff --git a/pymode/libs/appdirs.py b/pymode/libs/appdirs.py new file mode 120000 index 00000000..da7cbf20 --- /dev/null +++ b/pymode/libs/appdirs.py @@ -0,0 +1 @@ +../../submodules/appdirs/appdirs.py \ No newline at end of file diff --git a/pymode/libs/pytoolconfig b/pymode/libs/pytoolconfig new file mode 120000 index 00000000..0a2d520c --- /dev/null +++ b/pymode/libs/pytoolconfig @@ -0,0 +1 @@ +../../submodules/pytoolconfig/pytoolconfig/ \ No newline at end of file diff --git a/pymode/libs/tomli b/pymode/libs/tomli new file mode 120000 index 00000000..2413e2b5 --- /dev/null +++ b/pymode/libs/tomli @@ -0,0 +1 @@ +../../submodules/tomli/src/tomli \ No newline at end of file diff --git a/submodules/appdirs b/submodules/appdirs new file mode 160000 index 00000000..193a2cbb --- /dev/null +++ b/submodules/appdirs @@ -0,0 +1 @@ +Subproject commit 193a2cbba58cce2542882fcedd0e49f6763672ed diff --git a/submodules/pytoolconfig b/submodules/pytoolconfig new file mode 160000 index 00000000..549787fa --- /dev/null +++ b/submodules/pytoolconfig @@ -0,0 +1 @@ +Subproject commit 549787fa7d100c93333f48aaa9b07619f171736e diff --git a/submodules/rope b/submodules/rope index 62af070a..c0433a82 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit 
62af070aa5ed3505a2629a76778003ce7fd383f0 +Subproject commit c0433a82503ab4f8103f53d82655a004c6f9a93b diff --git a/submodules/tomli b/submodules/tomli new file mode 160000 index 00000000..7e563eed --- /dev/null +++ b/submodules/tomli @@ -0,0 +1 @@ +Subproject commit 7e563eed5286b5d46b8290a9f56a86d955b23a9a From 149ccf7c5be0753f5e9872c023ab2eeec3442105 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Fri, 25 Nov 2022 09:28:37 -0300 Subject: [PATCH 68/81] Update git submodule reference to use https instead of git@ --- .gitmodules | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 1ef5f423..59d00541 100644 --- a/.gitmodules +++ b/.gitmodules @@ -50,10 +50,10 @@ url = https://github.com/uiri/toml.git [submodule "submodules/pytoolconfig"] path = submodules/pytoolconfig - url = git@github.com:bagel897/pytoolconfig.git + url = https://github.com/bagel897/pytoolconfig.git [submodule "submodules/tomli"] path = submodules/tomli - url = git@github.com:hukkin/tomli.git + url = https://github.com/hukkin/tomli.git [submodule "submodules/appdirs"] path = submodules/appdirs - url = git@github.com:ActiveState/appdirs.git + url = https://github.com/ActiveState/appdirs.git From dedf83eb162f5ba91ed0cbd176fa69625af24859 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Fri, 23 Jun 2023 23:41:26 -0300 Subject: [PATCH 69/81] Bump dependencies --- submodules/astroid | 2 +- submodules/autopep8 | 2 +- submodules/mccabe | 2 +- submodules/pycodestyle | 2 +- submodules/pyflakes | 2 +- submodules/pylama | 2 +- submodules/pylint | 2 +- submodules/rope | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/submodules/astroid b/submodules/astroid index 36dda3fc..8523ba82 160000 --- a/submodules/astroid +++ b/submodules/astroid @@ -1 +1 @@ -Subproject commit 36dda3fc8a5826b19a33a0ff29402b61d6a64fc2 +Subproject commit 8523ba827006d56a770a1f6efa77215718ef26c0 diff --git a/submodules/autopep8 b/submodules/autopep8 index 32c78a3a..6e6d4ba4 160000 --- a/submodules/autopep8 +++ b/submodules/autopep8 @@ -1 +1 @@ -Subproject commit 32c78a3a07d7ee35500e6f20bfcd621f3132c42e +Subproject commit 6e6d4ba4a043da1a56ca0ec7280a7d4f40283215 diff --git a/submodules/mccabe b/submodules/mccabe index 2d4dd943..85185224 160000 --- a/submodules/mccabe +++ b/submodules/mccabe @@ -1 +1 @@ -Subproject commit 2d4dd9435fcb05aaa89ba0392a84cb1d30a87dc9 +Subproject commit 851852240f2fa4453c226ccc5ae88bc03b467388 diff --git a/submodules/pycodestyle b/submodules/pycodestyle index 930e2cad..1063db87 160000 --- a/submodules/pycodestyle +++ b/submodules/pycodestyle @@ -1 +1 @@ -Subproject commit 930e2cad15df3661306740c30a892a6f1902ef1d +Subproject commit 1063db8747e7d4e213160458aa3792e5ec05bc10 diff --git a/submodules/pyflakes b/submodules/pyflakes index 95fe313b..b37f91a1 160000 --- a/submodules/pyflakes +++ b/submodules/pyflakes @@ -1 +1 @@ -Subproject commit 95fe313ba5ca384041472cd171ea60fad910c207 +Subproject commit b37f91a1ae25cfc242d5043985b05159e152091a diff --git a/submodules/pylama b/submodules/pylama index f436ccc6..53ad214d 160000 --- a/submodules/pylama +++ b/submodules/pylama @@ -1 +1 @@ -Subproject commit f436ccc6b55b33381a295ded753e467953cf4379 +Subproject commit 53ad214de0aa9534e59bcd5f97d9d723d16cfdb8 diff --git a/submodules/pylint b/submodules/pylint index 3eb0362d..fc34a4b6 160000 --- a/submodules/pylint +++ b/submodules/pylint @@ -1 +1 @@ -Subproject commit 3eb0362dc42642e3e2774d7523a1e73d71394064 +Subproject commit 
fc34a4b6abe56f3ac07ca15d846b1c1955545f85 diff --git a/submodules/rope b/submodules/rope index c0433a82..b0c8a5fc 160000 --- a/submodules/rope +++ b/submodules/rope @@ -1 +1 @@ -Subproject commit c0433a82503ab4f8103f53d82655a004c6f9a93b +Subproject commit b0c8a5fc03ecbc94bd85dff46fc8b3f98f26a91e From 25b30ced6e9545a53a6bceb3bde3c5d95630f649 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Fri, 23 Jun 2023 23:49:23 -0300 Subject: [PATCH 70/81] Fix pylama import --- pymode/lint.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pymode/lint.py b/pymode/lint.py index ba187558..c03bd255 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -6,7 +6,7 @@ import os.path -from pylama.lint.extensions import LINTERS +from pylama.lint import LINTERS try: from pylama.lint.pylama_pylint import Linter @@ -35,11 +35,11 @@ def code_check(): # Fixed in v0.9.3: these two parameters may be passed as strings. # DEPRECATE: v:0.10.0: need to be set as lists. if isinstance(env.var('g:pymode_lint_ignore'), str): - raise ValueError ('g:pymode_lint_ignore should have a list type') + raise ValueError('g:pymode_lint_ignore should have a list type') else: ignore = env.var('g:pymode_lint_ignore') if isinstance(env.var('g:pymode_lint_select'), str): - raise ValueError ('g:pymode_lint_select should have a list type') + raise ValueError('g:pymode_lint_select should have a list type') else: select = env.var('g:pymode_lint_select') options = parse_options( From 4ad80be8bb9a0e55422ac0ee9654e9506be00e4a Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 24 Jun 2023 00:16:57 -0300 Subject: [PATCH 71/81] Fix shellcheck in test --- tests/test.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test.sh b/tests/test.sh index fe9fcae1..48345ad5 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -4,7 +4,7 @@ set -e which vim 1>/dev/null 2>/dev/null -cd $(dirname $0) +cd "$(dirname "$0")" # Source common variables. source ./test_helpers_bash/test_variables.sh @@ -49,7 +49,7 @@ RETURN_CODES=$(cat $VIM_OUTPUT_FILE | grep -i "Return code") echo -e "${RETURN_CODES}" # Exit the script with error if there are any return codes different from 0. 
-if echo $RETURN_CODES | grep -E "Return code: [1-9]" 1>/dev/null 2>/dev/null +if echo "${RETURN_CODES}" | grep -E "Return code: [1-9]" 1>/dev/null 2>/dev/null then exit 1 else From 74a7c7bf17fa48d46f4265be55ea6bf0e99bfc55 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 24 Jun 2023 00:32:49 -0300 Subject: [PATCH 72/81] Fix import error --- pymode/lint.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pymode/lint.py b/pymode/lint.py index c03bd255..d7d24449 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -65,7 +65,8 @@ def code_check(): return env.stop() if env.options.get('debug'): - from pylama.core import LOGGER, logging + import logging + from pylama.core import LOGGER LOGGER.setLevel(logging.DEBUG) errors = run(path, code='\n'.join(env.curbuf) + '\n', options=options) From d89385a205c23670456723a7f57cdb5e1b1e2acd Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 24 Jun 2023 01:23:32 -0300 Subject: [PATCH 73/81] Fix erros due to pylama Erro class changes --- pymode/lint.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/pymode/lint.py b/pymode/lint.py index d7d24449..c5304043 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -84,11 +84,16 @@ def __sort(e): env.debug("Find sorting: ", sort_rules) errors = sorted(errors, key=__sort) + errors_list = [] for e in errors: - e._info['bufnr'] = env.curbuf.number - if e._info['col'] is None: - e._info['col'] = 1 - - env.run('g:PymodeLocList.current().extend', [e._info for e in errors]) + if e.col is None: + e.col = 1 + err_dict = e.to_dict() + err_dict['bufnr'] = env.curbuf.number + err_dict['type'] = e.etype + err_dict['text'] = e.message + errors_list.append(err_dict) + + env.run('g:PymodeLocList.current().extend', errors_list) # pylama:ignore=W0212,E1103 From d44851ce678d53832f5fc021e1f845eb5290645a Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 24 Jun 2023 10:18:06 -0300 Subject: [PATCH 74/81] Improve tests outputs --- tests/test.sh | 6 +++--- tests/test_bash/test_autocommands.sh | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test.sh b/tests/test.sh index 48345ad5..acf75076 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -26,9 +26,9 @@ declare -a TEST_ARRAY=( set +e for ONE_TEST in "${TEST_ARRAY[@]}" do - echo "Starting test: $ONE_TEST" >> $VIM_OUTPUT_FILE - bash -x "$ONE_TEST" - echo -e "\n$ONE_TEST: Return code: $?" >> $VIM_OUTPUT_FILE + echo "Starting test: $ONE_TEST" | tee $VIM_OUTPUT_FILE + bash "$ONE_TEST" + echo -e "\n$ONE_TEST: Return code: $?" | tee $VIM_OUTPUT_FILE bash ./test_helpers_bash/test_prepare_between_tests.sh done diff --git a/tests/test_bash/test_autocommands.sh b/tests/test_bash/test_autocommands.sh index bc46b9d5..9fabebfd 100644 --- a/tests/test_bash/test_autocommands.sh +++ b/tests/test_bash/test_autocommands.sh @@ -19,7 +19,7 @@ declare -a TEST_PYMODE_COMMANDS_ARRAY=( set +e for ONE_PYMODE_COMMANDS_TEST in "${TEST_PYMODE_COMMANDS_ARRAY[@]}" do - echo "Starting test: $0:$ONE_PYMODE_COMMANDS_TEST" >> $VIM_OUTPUT_FILE + echo "Starting test: $0:$ONE_PYMODE_COMMANDS_TEST" | tee $VIM_OUTPUT_FILE RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) ### Enable the following to execute one test at a time. @@ -27,7 +27,7 @@ do ### FOR PINPOINT TESTING ### exit 1 RETURN_CODE=$? 
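
Relating back to the pylama error-class fix above: each error object is normalised into the dictionary shape Vim's location list understands. A standalone sketch of that conversion, with a hypothetical stand-in error object (only the col/etype/message attributes, the to_dict() call and the bufnr/type/text keys come from the plugin code; the rest is illustrative):

    class FakeError:
        """Hypothetical stand-in mirroring the attributes pymode reads."""

        def __init__(self, lnum, col, etype, message):
            self.lnum, self.col, self.etype, self.message = lnum, col, etype, message

        def to_dict(self):
            return {"lnum": self.lnum, "col": self.col}


    def to_loclist(errors, bufnr):
        entries = []
        for err in errors:
            if err.col is None:       # pylama may report no column at all
                err.col = 1
            entry = err.to_dict()
            entry["bufnr"] = bufnr    # extra keys accepted by Vim's setloclist()
            entry["type"] = err.etype
            entry["text"] = err.message
            entries.append(entry)
        return entries


    print(to_loclist([FakeError(3, None, "E", "undefined name 'x'")], bufnr=1))
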
- echo -e "\n$0:$ONE_PYMODE_COMMANDS_TEST: Return code: $RETURN_CODE" >> $VIM_OUTPUT_FILE + echo -e "\n$0:$ONE_PYMODE_COMMANDS_TEST: Return code: $RETURN_CODE" | tee $VIM_OUTPUT_FILE bash ./test_helpers_bash/test_prepare_between_tests.sh done From a4731b47b1847bbf80eb7c5d5a6cee50a2b42b48 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sat, 24 Jun 2023 15:54:15 -0300 Subject: [PATCH 75/81] [Tests] apply shellcheck on shell test files and import tests outputs --- tests/test.sh | 41 ++++++++-------- tests/test_bash/test_autocommands.sh | 12 +++-- tests/test_bash/test_autopep8.sh | 5 +- tests/test_bash/test_folding.sh | 47 +++++++------------ tests/test_bash/test_pymodelint.sh | 9 ++-- tests/test_bash/test_textobject.sh | 12 ++--- tests/test_helpers_bash/test_createvimrc.sh | 45 +++++++++--------- .../test_prepare_between_tests.sh | 9 ++-- tests/test_helpers_bash/test_prepare_once.sh | 4 +- tests/test_helpers_bash/test_variables.sh | 12 +++-- 10 files changed, 97 insertions(+), 99 deletions(-) diff --git a/tests/test.sh b/tests/test.sh index acf75076..eb4ec018 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -22,38 +22,37 @@ declare -a TEST_ARRAY=( "./test_bash/test_folding.sh" "./test_bash/test_textobject.sh" ) +MAIN_RETURN=0 ## now loop through the above array set +e -for ONE_TEST in "${TEST_ARRAY[@]}" +for TEST in "${TEST_ARRAY[@]}" do - echo "Starting test: $ONE_TEST" | tee $VIM_OUTPUT_FILE - bash "$ONE_TEST" - echo -e "\n$ONE_TEST: Return code: $?" | tee $VIM_OUTPUT_FILE + echo "Starting test: ${TEST}" | tee -a "${VIM_OUTPUT_FILE}" + bash "${TEST}" + R=$? + MAIN_RETURN=$(( MAIN_RETURN + R )) + echo -e "${TEST}: Return code: ${R}\n" | tee -a "${VIM_OUTPUT_FILE}" bash ./test_helpers_bash/test_prepare_between_tests.sh done -# Show errors: -E1=$(grep -E "^E[0-9]+:" $VIM_OUTPUT_FILE) -E2=$(grep -E "^Error" $VIM_OUTPUT_FILE) -E3="$E1\n$E2" -if [ "$E3" = "\n" ] -then - echo "No errors." -else - echo "Errors:" - echo -e "$E3\n" -fi +echo "=========================================================================" +echo " RESULTS" +echo "=========================================================================" # Show return codes. -RETURN_CODES=$(cat $VIM_OUTPUT_FILE | grep -i "Return code") +RETURN_CODES=$(grep -i "Return code" < "${VIM_OUTPUT_FILE}" | grep -v "Return code: 0") echo -e "${RETURN_CODES}" -# Exit the script with error if there are any return codes different from 0. -if echo "${RETURN_CODES}" | grep -E "Return code: [1-9]" 1>/dev/null 2>/dev/null -then - exit 1 +# Show errors: +E1=$(grep -E "^E[0-9]+:" "${VIM_OUTPUT_FILE}") +E2=$(grep -Ei "^Error" "${VIM_OUTPUT_FILE}") +if [[ "${MAIN_RETURN}" == "0" ]]; then + echo "No errors." else - exit 0 + echo "Errors:" + echo -e "${E1}\n${E2}" fi +# Exit the script with error if there are any return codes different from 0. 
+exit ${MAIN_RETURN} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_bash/test_autocommands.sh b/tests/test_bash/test_autocommands.sh index 9fabebfd..89d8a70d 100644 --- a/tests/test_bash/test_autocommands.sh +++ b/tests/test_bash/test_autocommands.sh @@ -15,20 +15,24 @@ declare -a TEST_PYMODE_COMMANDS_ARRAY=( ### FOR PINPOINT TESTING ### "./test_procedures_vimscript/pymoderun.vim" ### FOR PINPOINT TESTING ### ) +RETURN_CODE=0 + ## now loop through the above array set +e for ONE_PYMODE_COMMANDS_TEST in "${TEST_PYMODE_COMMANDS_ARRAY[@]}" do - echo "Starting test: $0:$ONE_PYMODE_COMMANDS_TEST" | tee $VIM_OUTPUT_FILE - RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) + CONTENT="$(vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ${ONE_PYMODE_COMMANDS_TEST}" "${VIM_DISPOSABLE_PYFILE}" 2>&1)" ### Enable the following to execute one test at a time. ### FOR PINPOINT TESTING ### vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source $ONE_PYMODE_COMMANDS_TEST" $VIM_DISPOSABLE_PYFILE ### FOR PINPOINT TESTING ### exit 1 - RETURN_CODE=$? - echo -e "\n$0:$ONE_PYMODE_COMMANDS_TEST: Return code: $RETURN_CODE" | tee $VIM_OUTPUT_FILE + SUB_TEST_RETURN_CODE=$? + echo -e "${CONTENT}" >> "${VIM_OUTPUT_FILE}" + RETURN_CODE=$(( RETURN_CODE + SUB_TEST_RETURN_CODE )) + echo -e "\tSubTest: $0:${ONE_PYMODE_COMMANDS_TEST}: Return code: ${SUB_TEST_RETURN_CODE}" | tee -a "${VIM_OUTPUT_FILE}" bash ./test_helpers_bash/test_prepare_between_tests.sh done +exit ${RETURN_CODE} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_bash/test_autopep8.sh b/tests/test_bash/test_autopep8.sh index 05585725..2a70072a 100644 --- a/tests/test_bash/test_autopep8.sh +++ b/tests/test_bash/test_autopep8.sh @@ -2,9 +2,10 @@ # Source file. set +e -RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/autopep8.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) +CONTENT="$(vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ./test_procedures_vimscript/autopep8.vim" "${VIM_DISPOSABLE_PYFILE}" 2>&1)" RETURN_CODE=$? +echo -e "${CONTENT}" >> "${VIM_OUTPUT_FILE}" set -e -exit $RETURN_CODE +exit ${RETURN_CODE} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_bash/test_folding.sh b/tests/test_bash/test_folding.sh index d0ac884a..60e60c42 100644 --- a/tests/test_bash/test_folding.sh +++ b/tests/test_bash/test_folding.sh @@ -2,35 +2,24 @@ # Note: a solution with unix 'timeout' program was tried but it was unsuccessful. The problem with folding 4 is that in the case of a crash one expects the folding to just stay in an infinite loop, thus never existing with error. An improvement is suggested to this case. -# Source file. -set +e -source ./test_helpers_bash/test_prepare_between_tests.sh -vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding1.vim" $VIM_DISPOSABLE_PYFILE > /dev/null -R1=$? -source ./test_helpers_bash/test_prepare_between_tests.sh -vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding2.vim" $VIM_DISPOSABLE_PYFILE > /dev/null -R2=$? -source ./test_helpers_bash/test_prepare_between_tests.sh -# TODO: enable folding3.vim script back. -# vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding3.vim" $VIM_DISPOSABLE_PYFILE > /dev/null -# R3=$? 
-source ./test_helpers_bash/test_prepare_between_tests.sh -vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/folding4.vim" $VIM_DISPOSABLE_PYFILE > /dev/null -R4=$? -set -e +declare -a TEST_PYMODE_FOLDING_TESTS_ARRAY=( + "./test_procedures_vimscript/folding1.vim" + "./test_procedures_vimscript/folding2.vim" + # "./test_procedures_vimscript/folding3.vim" + "./test_procedures_vimscript/folding4.vim" + ) + +RETURN_CODE=0 -if [[ "$R1" -ne 0 ]] -then - exit 1 -elif [[ "$R2" -ne 0 ]] -then - exit 2 -# elif [[ "$R3" -ne 0 ]] -# then -# exit 3 -elif [[ "$R4" -ne 0 ]] -then - exit 4 -fi +set +e +for SUB_TEST in "${TEST_PYMODE_FOLDING_TESTS_ARRAY[@]}"; do + CONTENT="$(vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ${SUB_TEST}" "${VIM_DISPOSABLE_PYFILE}" 2>&1)" + SUB_TEST_RETURN_CODE=$? + echo -e "${CONTENT}" >> "${VIM_OUTPUT_FILE}" + RETURN_CODE=$(( RETURN_CODE + SUB_TEST_RETURN_CODE )) + echo -e "\tSubTest: $0:${SUB_TEST}: Return code: ${SUB_TEST_RETURN_CODE}" | tee -a "${VIM_OUTPUT_FILE}" + bash ./test_helpers_bash/test_prepare_between_tests.sh +done +exit ${RETURN_CODE} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_bash/test_pymodelint.sh b/tests/test_bash/test_pymodelint.sh index 583d0774..9f903955 100644 --- a/tests/test_bash/test_pymodelint.sh +++ b/tests/test_bash/test_pymodelint.sh @@ -5,10 +5,11 @@ # Source file. set +e -vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodelint.vim" $VIM_DISPOSABLE_PYFILE -# RETURN_CODE=$(vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/pymodeversion.vim" $VIM_DISPOSABLE_PYFILE > /dev/null 2>&1) -# RETURN_CODE=$? +# vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ./test_procedures_vimscript/pymodelint.vim" "${VIM_DISPOSABLE_PYFILE}" >> "${VIM_OUTPUT_FILE}" 2>&1 +CONTENT="$(vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ./test_procedures_vimscript/pymodeversion.vim" "${VIM_DISPOSABLE_PYFILE}" 2>&1)" +RETURN_CODE=$? +echo -e "${CONTENT}" >> "${VIM_OUTPUT_FILE}" set -e -# exit $RETURN_CODE +exit ${RETURN_CODE} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_bash/test_textobject.sh b/tests/test_bash/test_textobject.sh index 43a799f9..cf90c87a 100644 --- a/tests/test_bash/test_textobject.sh +++ b/tests/test_bash/test_textobject.sh @@ -2,14 +2,12 @@ # Source file. set +e +# shellcheck source=../test_helpers_bash/test_prepare_between_tests.sh source ./test_helpers_bash/test_prepare_between_tests.sh -vim --clean -i NONE -u $VIM_TEST_VIMRC -c "source ./test_procedures_vimscript/textobject.vim" $VIM_DISPOSABLE_PYFILE > /dev/null -R1=$? +CONTENT="$(vim --clean -i NONE -u "${VIM_TEST_VIMRC}" -c "source ./test_procedures_vimscript/textobject.vim" "${VIM_DISPOSABLE_PYFILE}" 2>&1)" +RETURN_CODE=$? +echo -e "${CONTENT}" >> "${VIM_OUTPUT_FILE}" set -e -if [[ "$R1" -ne 0 ]] -then - exit 1 -fi - +exit ${RETURN_CODE} # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_helpers_bash/test_createvimrc.sh b/tests/test_helpers_bash/test_createvimrc.sh index ae763b95..d816df98 100644 --- a/tests/test_helpers_bash/test_createvimrc.sh +++ b/tests/test_helpers_bash/test_createvimrc.sh @@ -1,26 +1,27 @@ #! /bin/bash # Create minimal vimrc. 
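The hunk that follows replaces the long run of `echo ... >> $VIM_TEST_VIMRC` lines with a single here-document. As a quick illustration of the form it uses, here is a tiny standalone sketch with a hypothetical target file (not part of the patch):

```bash
#!/usr/bin/env bash
TARGET="$(mktemp /tmp/minimal_vimrc.XXXXXX)"   # hypothetical target file
# One here-document replaces a long series of `echo ... >> file` calls.
# The <<-EOF variant used in the patch also strips leading tab characters
# (not spaces) from the body, so the block may be tab-indented in the script.
cat <<EOF >> "${TARGET}"
syntax on
filetype plugin indent on
set nocompatible
EOF
cat "${TARGET}"
```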
-echo -e "syntax on\nfiletype plugin indent on\nset nocompatible" >> $VIM_TEST_VIMRC -echo "call has('python3')" >> $VIM_TEST_VIMRC -echo "set paste" >> $VIM_TEST_VIMRC -echo "set shortmess=at" >> $VIM_TEST_VIMRC -echo "set cmdheight=10" >> $VIM_TEST_VIMRC -echo "set ft=python" >> $VIM_TEST_VIMRC -echo "set shell=bash" >> $VIM_TEST_VIMRC -echo "set noswapfile" >> $VIM_TEST_VIMRC -echo "set backupdir=" >> $VIM_TEST_VIMRC -echo "set undodir=" >> $VIM_TEST_VIMRC -echo "set viewdir=" >> $VIM_TEST_VIMRC -echo "set directory=" >> $VIM_TEST_VIMRC -echo -e "set runtimepath=" >> $VIM_TEST_VIMRC -echo -e "set runtimepath+=$(dirname $PWD)\n" >> $VIM_TEST_VIMRC -echo -e "set packpath+=/tmp\n" >> $VIM_TEST_VIMRC -# echo -e "redir! >> $VIM_OUTPUT_FILE\n" >> $VIM_TEST_VIMRC -echo -e "set verbosefile=$VIM_OUTPUT_FILE\n" >> $VIM_TEST_VIMRC -echo -e "let g:pymode_debug = 1" >> $VIM_TEST_VIMRC - -echo "set nomore" >> $VIM_TEST_VIMRC - - +cat <<-EOF >> "${VIM_TEST_VIMRC}" + syntax on + filetype plugin indent on + set nocompatible + call has('python3') + set paste + set shortmess=at + set cmdheight=10 + set ft=python + set shell=bash + set noswapfile + set backupdir= + set undodir= + set viewdir= + set directory= + set runtimepath= + set runtimepath+="$(dirname "${PWD}")" + set packpath+=/tmp + " redir! >> "${VIM_OUTPUT_FILE}" + set verbosefile="${VIM_OUTPUT_FILE}" + let g:pymode_debug = 1 + set nomore +EOF # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_helpers_bash/test_prepare_between_tests.sh b/tests/test_helpers_bash/test_prepare_between_tests.sh index cdce9869..7a8f52e7 100644 --- a/tests/test_helpers_bash/test_prepare_between_tests.sh +++ b/tests/test_helpers_bash/test_prepare_between_tests.sh @@ -2,11 +2,12 @@ # Prepare tests. set +e -if [ -f $VIM_DISPOSABLE_PYFILE ]; then - rm $VIM_DISPOSABLE_PYFILE +if [ -f "${VIM_DISPOSABLE_PYFILE}" ]; then + rm "${VIM_DISPOSABLE_PYFILE}" fi -export VIM_DISPOSABLE_PYFILE=`mktemp /tmp/pymode.tmpfile.XXXXXXXXXX.py` +VIM_DISPOSABLE_PYFILE="$(mktemp /tmp/pymode.tmpfile.XXXXXXXXXX.py)" +export VIM_DISPOSABLE_PYFILE set -e -touch $VIM_DISPOSABLE_PYFILE +touch "${VIM_DISPOSABLE_PYFILE}" # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_helpers_bash/test_prepare_once.sh b/tests/test_helpers_bash/test_prepare_once.sh index dad77182..da986b53 100644 --- a/tests/test_helpers_bash/test_prepare_once.sh +++ b/tests/test_helpers_bash/test_prepare_once.sh @@ -2,11 +2,11 @@ # Prepare tests. set +e -rm $VIM_OUTPUT_FILE $VIM_TEST_VIMRC $VIM_TEST_PYMODECOMMANDS $VIM_DISPOSABLE_PYFILE 2&>/dev/null +rm "${VIM_OUTPUT_FILE}" "${VIM_TEST_VIMRC}" "${VIM_TEST_PYMODECOMMANDS}" "${VIM_DISPOSABLE_PYFILE}" 2&>/dev/null rm /tmp/*pymode* 2&>/dev/null rm -rf /tmp/pack mkdir -p /tmp/pack/test_plugins/start -ln -s $(dirname $(pwd)) /tmp/pack/test_plugins/start/ +ln -s "$(dirname "$(pwd)")" /tmp/pack/test_plugins/start/ set -e # vim: set fileformat=unix filetype=sh wrap tw=0 : diff --git a/tests/test_helpers_bash/test_variables.sh b/tests/test_helpers_bash/test_variables.sh index 53edb5e5..f1995022 100644 --- a/tests/test_helpers_bash/test_variables.sh +++ b/tests/test_helpers_bash/test_variables.sh @@ -3,9 +3,13 @@ # Define variables for common test scripts. # Set variables. 
-export VIM_DISPOSABLE_PYFILE=`mktemp /tmp/pymode.tmpfile.XXXXXXXXXX.py` -export VIM_OUTPUT_FILE=/tmp/pymode.out -export VIM_TEST_VIMRC=/tmp/pymode_vimrc -export VIM_TEST_PYMODECOMMANDS=/tmp/pymode_commands.txt +VIM_DISPOSABLE_PYFILE="$(mktemp /tmp/pymode.tmpfile.XXXXXXXXXX.py)" +export VIM_DISPOSABLE_PYFILE +VIM_OUTPUT_FILE=/tmp/pymode.out +export VIM_OUTPUT_FILE +VIM_TEST_VIMRC=/tmp/pymode_vimrc +export VIM_TEST_VIMRC +VIM_TEST_PYMODECOMMANDS=/tmp/pymode_commands.txt +export VIM_TEST_PYMODECOMMANDS # vim: set fileformat=unix filetype=sh wrap tw=0 : From a87f7896ea847c91f3b08ea1c95bf0f1043ebaf3 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sun, 2 Jul 2023 22:17:01 -0300 Subject: [PATCH 76/81] Replace pep8 with pycodestyle For more info see: https://github.com/python-mode/python-mode/pull/1170#issuecomment-1610771071 --- doc/pymode.txt | 10 +++++----- plugin/pymode.vim | 8 ++++---- pymode/lint.py | 6 ++++++ tests/utils/pymoderc | 4 ++-- 4 files changed, 17 insertions(+), 11 deletions(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index c8f9dd2b..3565a4c3 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -293,7 +293,7 @@ Manually set breakpoint command (leave empty for automatic detection) 3. Code checking ~ *pymode-lint* -Pymode supports `pylint`, `pep257`, `pep8`, `pyflakes`, `mccabe` code +Pymode supports `pylint`, `pep257`, `pycodestyle`, `pyflakes`, `mccabe` code checkers. You could run several similar checkers. Pymode uses Pylama library for code checking. Many options like skip @@ -330,9 +330,9 @@ Show error message if cursor placed at the error line *'g:pymode_lint_message' Default code checkers (you could set several) *'g:pymode_lint_checkers'* > - let g:pymode_lint_checkers = ['pyflakes', 'pep8', 'mccabe'] + let g:pymode_lint_checkers = ['pyflakes', 'pycodestyle', 'mccabe'] -Values may be chosen from: `pylint`, `pep8`, `mccabe`, `pep257`, `pyflakes`. +Values may be chosen from: `pylint`, `pycodestyle`, `mccabe`, `pep257`, `pyflakes`. Skip errors and warnings *'g:pymode_lint_ignore'* E.g. ["W", "E2"] (Skip all Warnings and the Errors starting with E2) etc. @@ -376,9 +376,9 @@ Definitions for |signs| Pymode has the ability to set code checkers options from pymode variables: -Set PEP8 options *'g:pymode_lint_options_pep8'* +Set PEP8 options *'g:pymode_lint_options_pycodestyle'* > - let g:pymode_lint_options_pep8 = + let g:pymode_lint_options_pycodestyle = \ {'max_line_length': g:pymode_options_max_line_length} See https://pep8.readthedocs.org/en/1.4.6/intro.html#configuration for more diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 232dc2af..82ab95ff 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -122,8 +122,8 @@ call pymode#default("g:pymode_lint_on_fly", 0) " Show message about error in command line call pymode#default("g:pymode_lint_message", 1) -" Choices are: pylint, pyflakes, pep8, mccabe and pep257 -call pymode#default("g:pymode_lint_checkers", ['pyflakes', 'pep8', 'mccabe']) +" Choices are: pylint, pyflakes, pycodestyle, mccabe and pep257 +call pymode#default("g:pymode_lint_checkers", ['pyflakes', 'pycodestyle', 'mccabe']) " Skip errors and warnings (e.g. E4,W) call pymode#default("g:pymode_lint_ignore", []) @@ -152,8 +152,8 @@ call pymode#default("g:pymode_lint_info_symbol", "II") call pymode#default("g:pymode_lint_pyflakes_symbol", "FF") " Code checkers options -" TODO: check if most adequate name name is pep8 or pycodestyle. -call pymode#default("g:pymode_lint_options_pep8", +" TODO: check if most adequate name name is pycodestyle. 
+call pymode#default("g:pymode_lint_options_pycodestyle", \ {'max_line_length': g:pymode_options_max_line_length}) call pymode#default("g:pymode_lint_options_pylint", diff --git a/pymode/lint.py b/pymode/lint.py index c5304043..b0103a50 100644 --- a/pymode/lint.py +++ b/pymode/lint.py @@ -42,6 +42,12 @@ def code_check(): raise ValueError('g:pymode_lint_select should have a list type') else: select = env.var('g:pymode_lint_select') + if 'pep8' in linters: + # TODO: Add a user visible deprecation warning here + env.message('pep8 linter is deprecated, please use pycodestyle.') + linters.remove('pep8') + linters.append('pycodestyle') + options = parse_options( linters=linters, force=1, ignore=ignore, diff --git a/tests/utils/pymoderc b/tests/utils/pymoderc index 222c6ceb..3a8477ea 100644 --- a/tests/utils/pymoderc +++ b/tests/utils/pymoderc @@ -25,7 +25,7 @@ let g:pymode_lint_on_write = 1 let g:pymode_lint_unmodified = 0 let g:pymode_lint_on_fly = 0 let g:pymode_lint_message = 1 -let g:pymode_lint_checkers = ['pyflakes', 'pep8', 'mccabe'] +let g:pymode_lint_checkers = ['pyflakes', 'pycodestyle', 'mccabe'] let g:pymode_lint_ignore = ["E501", "W",] let g:pymode_lint_select = ["E501", "W0011", "W430"] let g:pymode_lint_sort = [] @@ -37,7 +37,7 @@ let g:pymode_lint_visual_symbol = 'RR' let g:pymode_lint_error_symbol = 'EE' let g:pymode_lint_info_symbol = 'II' let g:pymode_lint_pyflakes_symbol = 'FF' -let g:pymode_lint_options_pep8 = +let g:pymode_lint_options_pycodestyle = \ {'max_line_length': g:pymode_options_max_line_length} let g:pymode_lint_options_pyflakes = { 'builtins': '_' } let g:pymode_lint_options_mccabe = { 'complexity': 12 } From a28ace5bee0ea292be9f979f3c651c47cc39b284 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sun, 2 Jul 2023 22:20:11 -0300 Subject: [PATCH 77/81] Update changelog --- CHANGELOG.md | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 001a9194..f8e3dbf7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,36 @@ ## TODO +- Update submodules + - Fix Errors related to these updates +- Improve tests outputs +- Fix Global and Module MoveRefactoring (#1141) Thanks to @lieryan +- Text object/operator/motion mapping to select logical line (#1145). Thanks to + @lieryan +- Remove dead keywords and builtins; add match, case (#1149). Thanks to + @NeilGirdhar +- Add syntax highlight for walrus (#1147) Thanks to @fpob +- Add configurable prefix for rope commands (#1137) TThanks to @NathanTP +- Add option g:pymode_indent_hanging_width for different hanging indentation + width (#1138). Thanks to @wookayin + +## 2020-10-08 0.13.0 + +- Add toml submodule + +## 2020-10-08 0.12.0 + +- Improve breakpoint feature +- Improve debugging script +- Update submodules +- Improve tests + +## 2020-05-28 0.11.0 + - Move changelog rst syntax to markdown - `pymode_rope`: check disables -- Remove supoort for python 2. From 0.11.0 on we will focus on supporting - python 3+ (probably 3.5+). +- BREAKING CHANGE: Remove supoort for python 2. From 0.11.0 on we will focus on + supporting python 3+ (probably 3.5+). 
- Inspect why files starting with the following code do not get loaded: ```python @@ -16,6 +42,12 @@ main() ``` +- added github actions test suit and remove travis +- improved submodules cloning (shallow) +- Removes `six` submodule +- Fix motion mapping +- Fix breakpoint feature + ## 2019-05-11 0.10.0 After many changes, including moving most of our dependencies from copied From aee5c38a63b5d191ca1e0304903f1aa57256d5a5 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sun, 2 Jul 2023 22:22:56 -0300 Subject: [PATCH 78/81] Update bumpversion --- .bumpversion.cfg | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 613addba..0eda784d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -8,3 +8,7 @@ tag_name = {new_version} [bumpversion:file:doc/pymode.txt] search = Version: {current_version} replace = Version: {new_version} + +[bumpversion:file:CHANGELOG.md] +search = Version: {current_version} +replace = Version: {new_version} From d69efa5624a60244b94d47d09e7309e0ac04b8e9 Mon Sep 17 00:00:00 2001 From: Diego Rabatone Oliveira Date: Sun, 2 Jul 2023 22:20:47 -0300 Subject: [PATCH 79/81] =?UTF-8?q?Bump=20version:=200.13.0=20=E2=86=92=200.?= =?UTF-8?q?14.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- CHANGELOG.md | 2 ++ doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0eda784d..84607ec8 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,6 @@ [bumpversion] commit = True -current_version = 0.13.0 +current_version = 0.14.0 files = plugin/pymode.vim tag = True tag_name = {new_version} diff --git a/CHANGELOG.md b/CHANGELOG.md index f8e3dbf7..4e7668dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## TODO +## 2023-07-02 0.14.0 + - Update submodules - Fix Errors related to these updates - Improve tests outputs diff --git a/doc/pymode.txt b/doc/pymode.txt index 3565a4c3..7235b5d5 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.13.0 + Version: 0.14.0 =============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 82ab95ff..b0d99270 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.13.0" +let g:pymode_version = "0.14.0" " Enable pymode by default :) From afc201a3e7416d95fb3238558b589223ffc0b07f Mon Sep 17 00:00:00 2001 From: Jongwook Choi Date: Thu, 21 Sep 2023 22:45:53 -0400 Subject: [PATCH 80/81] Fix PathFinder.find_module AttributeError for Python 3.12 PathFinder.find_module() has been deprecated since Python 3.4 in favor of find_spec(), and it's finally removed in Python 3.12. This line will throw an AttributeError which makes pymode completely unusable with python 3.12. It was a hacky workaround introduced in #1028. Maybe we can completely remove this workaround because it's 4 years ago and the minimum supported python version is now 3.6+. 
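Before the diff itself, a quick shell-level sanity check in the spirit of the test helpers: the sketch below confirms that a module resolves through the non-deprecated `find_spec` API. The module name is only an example and this is not part of the patch.

```bash
# Exits 0 if the module resolves via importlib.util.find_spec, 1 otherwise;
# "pymode" is only an example module name.
python3 -c 'import sys; from importlib.util import find_spec; sys.exit(0 if find_spec("pymode") else 1)'
echo "module resolvable (0 = yes): $?"
```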
--- pymode/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pymode/__init__.py b/pymode/__init__.py index aba22870..906d7059 100644 --- a/pymode/__init__.py +++ b/pymode/__init__.py @@ -6,7 +6,13 @@ import vim # noqa if not hasattr(vim, 'find_module'): - vim.find_module = _PathFinder.find_module + try: + vim.find_module = _PathFinder.find_module # deprecated + except AttributeError: + def _find_module(package_name): + spec = _PathFinder.find_spec(package_name) + return spec.loader if spec else None + vim.find_module = _find_module def auto(): From d43292ed5edfd19beea41b1b6ca8b69275bd1c38 Mon Sep 17 00:00:00 2001 From: "Sean M. Collins" Date: Thu, 16 May 2024 12:20:30 -0400 Subject: [PATCH 81/81] Move to pycodestyle 2.11.0 https://github.com/PyCQA/flake8/issues/1845#issuecomment-1766073353 --- submodules/pycodestyle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submodules/pycodestyle b/submodules/pycodestyle index 1063db87..21abd9b6 160000 --- a/submodules/pycodestyle +++ b/submodules/pycodestyle @@ -1 +1 @@ -Subproject commit 1063db8747e7d4e213160458aa3792e5ec05bc10 +Subproject commit 21abd9b6dcbfa38635bc85a2c2327ec11ad91ffc
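The new pin only takes effect in a working tree once the submodule is synced; a hedged sketch of the update-and-verify steps, run from the repository root (the reported tag should correspond to the 2.11.0 release named in the commit message):

```bash
# Sync the pycodestyle submodule to the newly pinned revision and check
# which tag the resulting checkout corresponds to.
git submodule update --init --recursive submodules/pycodestyle
git -C submodules/pycodestyle describe --tags
```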